The query that counts each employee's hobbies is broken — it fails with a NullPointerException (NPE).
Expected behavior: the query should return the hobby count for each employee.
What steps will reproduce the problem?
Start asterix using managix, two logical NC's and one CC on a standalone setup.
Run the following statements from Web UI.
drop dataverse test if exists;
create dataverse test;
use dataverse test;
create type Emp as open {
id : int32,
name : string,
hobbies:{{string}}
}
create dataset Employee(Emp) primary key id;
insert into dataset Employee ({"id":123,"name":"Kevin","hobbies":{{"scuba","hiking","biking","fishing","reading","swimming"}}});
insert into dataset Employee ({"id":171,"name":"Karl Jr","hobbies":{{"soccer","ping-pong","surfing","fishing","boxing","swimming"}}});
for $l in dataset Employee
let $c := count(for $m in $l.hobbies return $m)
return {"name":$l.name,"hobby-count":$c}
edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException:
edu.uci.ics.hyracks.api.exceptions.HyracksException: Job Failed
INFO: Optimized Plan:
distribute result [%0->$$7]
-- DISTRIBUTE_RESULT |PARTITIONED|
exchange
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
project ([$$7])
-- STREAM_PROJECT |PARTITIONED|
assign [$$7] <- [function-call: asterix:closed-record-constructor, Args:[AString: {name}, %0->$$11, AString: {hobby-count}, function-call: asterix:count, Args:[%0->$$6]]]
-- ASSIGN |PARTITIONED|
project ([$$6, $$11])
-- STREAM_PROJECT |PARTITIONED|
subplan {
aggregate [$$6] <- [function-call: asterix:listify, Args:[%0->$$1]]
-- AGGREGATE |LOCAL|
unnest $$1 <- function-call: asterix:scan-collection, Args:[%0->$$10]
-- UNNEST |LOCAL|
nested tuple source
-- NESTED_TUPLE_SOURCE |LOCAL|
}
-- SUBPLAN |PARTITIONED|
project ([$$10, $$11])
-- STREAM_PROJECT |PARTITIONED|
assign [$$11, $$10] <- [function-call: asterix:field-access-by-index, Args:[%0->$$0, AInt32: {1}], function-call: asterix:field-access-by-index, Args:[%0->$$0, AInt32: {2}]]
-- ASSIGN |PARTITIONED|
project ([$$0])
-- STREAM_PROJECT |PARTITIONED|
exchange
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
data-scan []<-[$$9, $$0] <- test:Employee
-- DATASOURCE_SCAN |PARTITIONED|
exchange
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
empty-tuple-source
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
Stack trace from CC.log
java.lang.NullPointerException
at edu.uci.ics.asterix.runtime.aggregates.scalar.AbstractScalarAggregateDescriptor$1.createEvaluator(AbstractScalarAggregateDescriptor.java:35)
at edu.uci.ics.asterix.runtime.evaluators.common.ClosedRecordConstructorEvalFactory.createEvaluator(ClosedRecordConstructorEvalFactory.java:51)
at edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter$ScalarEvaluatorFactoryAdapter.createScalarEvaluator(LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.java:87)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory$1.open(AssignRuntimeFactory.java:97)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory$1.open(StreamProjectRuntimeFactory.java:53)
at edu.uci.ics.hyracks.algebricks.runtime.operators.meta.SubplanRuntimeFactory$1.open(SubplanRuntimeFactory.java:158)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory$1.open(StreamProjectRuntimeFactory.java:53)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory$1.open(AssignRuntimeFactory.java:103)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory$1.open(StreamProjectRuntimeFactory.java:53)
at edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.open(AlgebricksMetaOperatorDescriptor.java:131)
at edu.uci.ics.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.open(IndexSearchOperatorNodePushable.java:79)
at edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.open(EmptyTupleSourceRuntimeFactory.java:50)
at edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:104)
at edu.uci.ics.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:81)
at edu.uci.ics.hyracks.control.nc.Task.run(Task.java:232)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:722)
Original issue reported on code.google.com by khfaraaz82 on 30 Apr 2013 at 1:10