Closed neoform closed 7 years ago
Thanks for reporting this bug.
How many nodes do you have in your cluster and how many replicas has this index?
Could you try to find the stack trace of this exception in the Elasticsearch logs?
Could you try to reproduce this bug again after adding a line that contains -ea
to your config/jvm.options
and restarting. This will tell the JVM to enable assertions and potentially help us get more information about where invariants get violated.
OK, I just reviewed conditions under which this exception may happen and it looks like the only option is that there is an orphan child document at the end of a segment. This is bad since it should never happen given how Elasticsearch enforces the block structure when nested mappings are used.
Could you check whether your index is corrupt by running the following command:
for index_dir in data/*/nodes/*/indices/*/*/index; do java -ea -cp lib/lucene-core-6.4.1.jar org.apache.lucene.index.CheckIndex -fast $index_dir; done
You may need to update the paths and replace data
with your data directory and lib
with your lib directory depending on how you installed Elasticsearch. This command will verify the checksums of all your index files and report in case there are mismatches. If it reports errors then it would mean your index is corrupt as its content does not match what was written, and otherwise it would mean there is likely a bug in how Elasticsearch builds nested blocks.
ian@sling:~# curl -XGET http://localhost:9200/_cat/shards?v
index shard prirep state docs store ip node
product 0 p STARTED 11048788 4.2gb 127.0.0.1 ddjlYjR
ian@sling:~# curl -XGET http://localhost:9200/_cat/nodes?v
ip heap.percent ram.percent cpu load_1m load_5m load_15m node.role master name
127.0.0.1 21 98 0 0.00 0.03 0.05 mdi * ddjlYjR
ian@sling:~# curl -XGET http://localhost:9200/_cat/allocation?v
shards disk.indices disk.used disk.avail disk.total disk.percent host ip node
1 4.2gb 88.4gb 333.7gb 422.1gb 20 127.0.0.1 127.0.0.1 ddjlYjR
ian@sling:~# curl -XGET http://localhost:9200/_cat/health?v
epoch timestamp cluster status node.total node.data shards pri relo init unassign pending_tasks max_task_wait_time active_shards_percent
1487681377 07:49:37 elasticsearch green 1 1 1 1 0 0 0 0 - 100.0%
From the error logs:
[2017-02-21T07:53:57,509][DEBUG][o.e.a.s.TransportSearchAction] [ddjlYjR] [product][0], node[ddjlYjROS8e3YTe4HT730A], [P], s[STARTED], a[id=WpOjQnjLSQGl0aHEkd0sSA]: Failed to execute [SearchRequest{searchType=QUERY_AND_FETCH, indices=[product], indicesOptions=IndicesOptions[id=38, ignore_unavailable=false, allow_no_indices=true, expand_wildcards_open=true, expand_wildcards_closed=false, allow_alisases_to_multiple_indices=true, forbid_closed_indices=true], types=[product], routing='null', preference='null', requestCache=null, scroll=null, source={
"from" : 0,
"size" : 40,
"timeout" : "6s",
"query" : {
"function_score" : {
"query" : {
"bool" : {
"filter" : [
{
"nested" : {
"query" : {
"bool" : {
"must" : [
{
"multi_match" : {
"query" : "television, audio, phone, dvd, tv, iphone",
"fields" : [
"product.brand.keywords_external^1.0",
"product.brand.keywords_internal^1.0",
"product.brand.name^1.0",
"product.description^4.0",
"product.name^10.0"
],
"type" : "best_fields",
"operator" : "OR",
"slop" : 0,
"prefix_length" : 0,
"max_expansions" : 50,
"minimum_should_match" : "0",
"lenient" : false,
"zero_terms_query" : "NONE",
"boost" : 1.0
}
}
],
"filter" : [
{
"term" : {
"product.status_id" : {
"value" : 1,
"boost" : 1.0
}
}
},
{
"nested" : {
"query" : {
"bool" : {
"filter" : [
{
"terms" : {
"product.category.id" : [
870
],
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product.category",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
},
{
"nested" : {
"query" : {
"bool" : {
"filter" : [
{
"terms" : {
"product.brand.id" : [
22861
],
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product.brand",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"functions" : [
{
"filter" : {
"match_all" : {
"boost" : 1.0
}
},
"field_value_factor" : {
"field" : "product_count",
"factor" : 50.0,
"modifier" : "log1p"
}
},
{
"filter" : {
"match_all" : {
"boost" : 1.0
}
},
"field_value_factor" : {
"field" : "rank_weight",
"factor" : 1.0,
"missing" : 1.0,
"modifier" : "reciprocal"
}
}
],
"score_mode" : "multiply",
"max_boost" : 3.4028235E38,
"boost" : 1.0
}
},
"_source" : {
"includes" : [
"mpn",
"cheapest_product_id",
"price",
"price_discount_percent",
"price_discount_amount",
"product_count",
"rank_weight",
"product.id",
"product.price",
"product.status_id"
],
"excludes" : [ ]
},
"aggregations" : {
"price" : {
"filter" : {
"bool" : {
"filter" : [
{
"nested" : {
"query" : {
"bool" : {
"must" : [
{
"multi_match" : {
"query" : "television, audio, phone, dvd, tv, iphone",
"fields" : [
"product.brand.keywords_external^1.0",
"product.brand.keywords_internal^1.0",
"product.brand.name^1.0",
"product.description^4.0",
"product.name^10.0"
],
"type" : "best_fields",
"operator" : "OR",
"slop" : 0,
"prefix_length" : 0,
"max_expansions" : 50,
"minimum_should_match" : "0",
"lenient" : false,
"zero_terms_query" : "NONE",
"boost" : 1.0
}
}
],
"filter" : [
{
"term" : {
"product.status_id" : {
"value" : 1,
"boost" : 1.0
}
}
},
{
"nested" : {
"query" : {
"bool" : {
"filter" : [
{
"terms" : {
"product.category.id" : [
870
],
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product.category",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
},
{
"nested" : {
"query" : {
"bool" : {
"filter" : [
{
"terms" : {
"product.brand.id" : [
22861
],
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product.brand",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"path" : "product",
"ignore_unmapped" : false,
"score_mode" : "avg",
"boost" : 1.0
}
}
],
"disable_coord" : false,
"adjust_pure_negative" : true,
"boost" : 1.0
}
},
"aggregations" : {
"priceMin" : {
"min" : {
"field" : "price"
}
},
"priceMax" : {
"max" : {
"field" : "price"
}
}
}
},
"brands" : {
"nested" : {
"path" : "product.brand"
}
org.elasticsearch.transport.RemoteTransportException: [ddjlYjR][127.0.0.1:9300][indices:data/read/search[phase/query+fetch]]
Caused by: org.elasticsearch.search.query.QueryPhaseExecutionException: Query Failed [Failed to execute main query]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:405) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) [elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]
Caused by: java.lang.IllegalStateException: Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=4423794, class org.apache.lucene.search.ConjunctionScorer
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.checkOrthogonal(ToParentBlockJoinQuery.java:403) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.access$400(ToParentBlockJoinQuery.java:206) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.nextDoc(ToParentBlockJoinQuery.java:286) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.advance(ToParentBlockJoinQuery.java:382) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.elasticsearch.common.lucene.Lucene$3.get(Lucene.java:785) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator$1.collect(FilterAggregator.java:63) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectExistingBucket(BucketsAggregator.java:80) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectBucket(BucketsAggregator.java:72) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator$1.collect(NestedAggregator.java:89) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector$2.collect(LeafBucketCollector.java:67) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector.collect(LeafBucketCollector.java:82) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.apache.lucene.search.MultiCollector$MultiLeafCollector.collect(MultiCollector.java:174) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.TimeLimitingCollector$1.collect(TimeLimitingCollector.java:158) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:221) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:172) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:669) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:473) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:379) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) ~[?:1.8.0_121]
[2017-02-21T07:53:57,522][DEBUG][o.e.a.s.TransportSearchAction] [ddjlYjR] All shards failed for phase: [query_fetch]
org.elasticsearch.transport.RemoteTransportException: [ddjlYjR][127.0.0.1:9300][indices:data/read/search[phase/query+fetch]]
Caused by: org.elasticsearch.search.query.QueryPhaseExecutionException: Query Failed [Failed to execute main query]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:405) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) [elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]
Caused by: java.lang.IllegalStateException: Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=4423794, class org.apache.lucene.search.ConjunctionScorer
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.checkOrthogonal(ToParentBlockJoinQuery.java:403) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.access$400(ToParentBlockJoinQuery.java:206) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.nextDoc(ToParentBlockJoinQuery.java:286) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.advance(ToParentBlockJoinQuery.java:382) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.elasticsearch.common.lucene.Lucene$3.get(Lucene.java:785) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator$1.collect(FilterAggregator.java:63) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectExistingBucket(BucketsAggregator.java:80) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectBucket(BucketsAggregator.java:72) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator$1.collect(NestedAggregator.java:89) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector$2.collect(LeafBucketCollector.java:67) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector.collect(LeafBucketCollector.java:82) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.apache.lucene.search.MultiCollector$MultiLeafCollector.collect(MultiCollector.java:174) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.TimeLimitingCollector$1.collect(TimeLimitingCollector.java:158) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:221) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:172) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:669) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:473) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:379) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) ~[?:1.8.0_121]
[2017-02-21T07:53:57,525][WARN ][r.suppressed ] path: /product/product/_search, params: {size=40, index=product, from=0, type=product, timeout=6s}
org.elasticsearch.action.search.SearchPhaseExecutionException: all shards failed
at org.elasticsearch.action.search.AbstractSearchAsyncAction.onFirstPhaseResult(AbstractSearchAsyncAction.java:208) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.AbstractSearchAsyncAction.access$100(AbstractSearchAsyncAction.java:52) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.AbstractSearchAsyncAction$1.onFailure(AbstractSearchAsyncAction.java:143) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.ActionListenerResponseHandler.handleException(ActionListenerResponseHandler.java:51) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$ContextRestoreResponseHandler.handleException(TransportService.java:1024) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$DirectResponseChannel.processException(TransportService.java:1126) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$DirectResponseChannel.sendResponse(TransportService.java:1104) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.onFailure(TransportService.java:621) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.onFailure(ThreadContext.java:581) [elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:39) [elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]
Caused by: org.elasticsearch.transport.RemoteTransportException: [ddjlYjR][127.0.0.1:9300][indices:data/read/search[phase/query+fetch]]
Caused by: org.elasticsearch.search.query.QueryPhaseExecutionException: Query Failed [Failed to execute main query]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:405) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-5.2.1.jar:5.2.1]
... 3 more
Caused by: java.lang.IllegalStateException: Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=4423794, class org.apache.lucene.search.ConjunctionScorer
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.checkOrthogonal(ToParentBlockJoinQuery.java:403) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer.access$400(ToParentBlockJoinQuery.java:206) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.nextDoc(ToParentBlockJoinQuery.java:286) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.advance(ToParentBlockJoinQuery.java:382) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.elasticsearch.common.lucene.Lucene$3.get(Lucene.java:785) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator$1.collect(FilterAggregator.java:63) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectExistingBucket(BucketsAggregator.java:80) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.BucketsAggregator.collectBucket(BucketsAggregator.java:72) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator$1.collect(NestedAggregator.java:89) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector$2.collect(LeafBucketCollector.java:67) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.LeafBucketCollector.collect(LeafBucketCollector.java:82) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.apache.lucene.search.MultiCollector$MultiLeafCollector.collect(MultiCollector.java:174) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.TimeLimitingCollector$1.collect(TimeLimitingCollector.java:158) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:221) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:172) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:669) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:473) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:379) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-5.2.1.jar:5.2.1]
... 3 more
I'm assuming I'll need to install something else for that integrity check command.
root@sling:~# for index_dir in data/*/nodes/*/indices/*/*/index; do java -ea -cp lib/lucene-core-6.4.1.jar org.apache.lucene.index.CheckIndex -fast $index_dir; done
Error: Could not find or load main class org.apache.lucene.index.CheckIndex
Does the lib/lucene-core-6.4.1.jar
file exist?
Ok so at least there is some interesting information in those stack traces: I expected the issue to be caused by the query but it is actually caused by the aggregation.
Thanks, I found the problem. This is due to how you are using a filter
aggregation with a query that matches top-level documents below a nested
aggregation that matches nested documents. We should improve validation in this area but in the meantime, if you can explain to me what you want your aggregation to do, I can help you fix it.
Sorry, I just noticed I had given you the stack trace for the 'full' query. My original bug was for a slimmed down version of the query that contained fewer aggregations.
Here's the first query's stack trace (the one in my first post):
[2017-02-21T08:39:30,017][ERROR][o.e.b.ElasticsearchUncaughtExceptionHandler] [] fatal error in thread [elasticsearch[ddjlYjR][search][T#11]], exiting
java.lang.AssertionError: null
at org.apache.lucene.search.join.ToParentBlockJoinQuery$BlockJoinScorer$1.nextDoc(ToParentBlockJoinQuery.java:289) ~[lucene-join-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:44:23]
at org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:219) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:172) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.LRUQueryCache.cacheIntoRoaringDocIdSet(LRUQueryCache.java:530) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.LRUQueryCache.cacheImpl(LRUQueryCache.java:506) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.LRUQueryCache$CachingWrapperWeight.cache(LRUQueryCache.java:708) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.LRUQueryCache$CachingWrapperWeight.scorer(LRUQueryCache.java:743) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.indices.IndicesQueryCache$CachingWeightWrapper.scorer(IndicesQueryCache.java:155) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator.getLeafCollector(FilterAggregator.java:59) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.AggregatorBase.getLeafCollector(AggregatorBase.java:149) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.AggregatorBase.getLeafCollector(AggregatorBase.java:148) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.BucketCollector$2.getLeafCollector(BucketCollector.java:75) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.aggregations.BucketCollector$2.getLeafCollector(BucketCollector.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.apache.lucene.search.MultiCollector.getLeafCollector(MultiCollector.java:121) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.TimeLimitingCollector.getLeafCollector(TimeLimitingCollector.java:144) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.FilterCollector.getLeafCollector(FilterCollector.java:40) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.search.query.CancellableCollector.getLeafCollector(CancellableCollector.java:61) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:660) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:473) ~[lucene-core-6.4.1.jar:6.4.1 72f75b2503fa0aa4f0aff76d439874feb923bb0e - jpountz - 2017-02-01 14:43:32]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:379) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:106) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:246) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:360) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:322) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.action.search.SearchTransportService$9.messageReceived(SearchTransportService.java:319) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:69) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.transport.TransportService$7.doRun(TransportService.java:610) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:596) ~[elasticsearch-5.2.1.jar:5.2.1]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37) ~[elasticsearch-5.2.1.jar:5.2.1]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[?:1.8.0_121]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[?:1.8.0_121]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]
If it would help debug this, I'm ok with giving you a full copy of my index (zip the ES data dir), since this is not production data (and it's not sensitive data).
I think I can help without looking at the data, the issue is with the nested
and filter
aggregations: you have a nested
aggregation that switches the scope to product.brand
while the filter matches root documents. I suspect what you actually want to do is to make the nested
aggregation a sub-aggregation of the filter
aggregation rather than the other way around. Something like this (not tested):
GET _search
{
"timeout": "6s",
"index": "product",
"type": "product",
"from": 0,
"size": 40,
"body": {
"_source": [
"mpn",
"cheapest_product_id",
"price",
"price_discount_percent",
"price_discount_amount",
"product_count",
"rank_weight",
"product.id",
"product.price",
"product.status_id"
],
"query": {
"function_score": {
"query": {
"bool": {
"filter": [
{
"nested": {
"path": "product",
"query": {
"bool": {
"filter": [
{
"term": {
"product.status_id": 1
}
},
{
"nested": {
"path": "product.category",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.category.id": [
870
]
}
}
]
}
}
}
},
{
"nested": {
"path": "product.brand",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.brand.id": [
23083
]
}
}
]
}
}
}
}
],
"must": [
{
"multi_match": {
"query": "television, audio, phone, dvd, tv",
"fields": [
"product.name^10",
"product.description^4",
"product.brand.name",
"product.brand.keywords_internal",
"product.brand.keywords_external"
],
"minimum_should_match": 0
}
}
]
}
}
}
}
]
}
},
"functions": [
{
"field_value_factor": {
"field": "product_count",
"factor": 50,
"modifier": "log1p"
}
},
{
"field_value_factor": {
"field": "rank_weight",
"modifier": "reciprocal",
"missing": 1
}
}
]
}
},
"aggs": {
"brands": {
"filter": {
"bool": {
"filter": [
{
"nested": {
"path": "product",
"query": {
"bool": {
"filter": [
{
"term": {
"product.status_id": 1
}
},
{
"nested": {
"path": "product.category",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.category.id": [
870
]
}
}
]
}
}
}
}
],
"must": [
{
"multi_match": {
"query": "television, audio, phone, dvd, tv",
"fields": [
"product.name^10",
"product.description^4",
"product.brand.name",
"product.brand.keywords_internal",
"product.brand.keywords_external"
],
"minimum_should_match": 0
}
}
]
}
}
}
}
]
}
},
"aggs": {
"brands": {
"nested": {
"path": "product.brand"
},
"aggs": {
"brands": {
"terms": {
"field": "product.brand.id",
"size": 30
}
}
}
}
}
}
}
}
}
I believe I had tried that originally and either got the same results, or got incorrect results, which is why I think I switched it.
My goal with the aggregate is to repeat the same query, minus the brand filter, so that I can get a list of all brands that would otherwise apply to the results had I not limited to a specific one in the main query.
I'll give that a try. Though I'm still at a loss as to why this query works 2-3 times then fails forever after that (until ES is restarted). I'm guessing it has to do what you were saying above about orphaned docs?
Yeah, when I was doing that query, the bucket counts weren't the correct counts (eg, the result count if I were to replace the brand id in the main query with the one shown in the brands aggregation). The only way I was able to get the correct values was with the first query I posted above. :(
That said, the version you just showed doesn't break my instance of ES, so maybe that's a plus? ;)
Though I'm still at a loss as to why this query works 2-3 times then fails forever after that (until ES is restarted). I'm guessing it has to do what you were saying above about orphaned docs?
This is due to caching. On the first runs, the filter aggregation only evaluates the filter against documents that match the query. However after it has been used multiple times, Elasticsearch thinks the filter is worth caching, and the caching process evaluates the filter against the whole index, which highlights this issue.
This makes sense, I figured it was something like that, but wasn't 100% sure. So is the bug with my query, or with ES (both?).
I can try to figure out a different way to get the results I want, I just need to be sure the problem is on my end.
Actually, I just realized that you were right with your suggested query; it was just missing one detail: I wanted that agg to be in global scope.
This gives me what I want:
{
"timeout": "6s",
"index": "product",
"type": "product",
"from": 0,
"size": 40,
"body": {
"_source": [
"mpn",
"cheapest_product_id",
"price",
"price_discount_percent",
"price_discount_amount",
"product_count",
"rank_weight",
"product.id",
"product.price",
"product.status_id"
],
"query": {
"function_score": {
"query": {
"bool": {
"filter": [
{
"nested": {
"path": "product",
"query": {
"bool": {
"filter": [
{
"term": {
"product.status_id": 1
}
},
{
"nested": {
"path": "product.category",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.category.id": [
870
]
}
}
]
}
}
}
},
{
"nested": {
"path": "product.brand",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.brand.id": [
22861
]
}
}
]
}
}
}
}
],
"must": [
{
"multi_match": {
"query": "television, audio, phone, dvd, tv, iphone",
"fields": [
"product.name^10",
"product.description^4",
"product.brand.name",
"product.brand.keywords_internal",
"product.brand.keywords_external"
],
"minimum_should_match": 0
}
}
]
}
}
}
}
]
}
},
"functions": [
{
"field_value_factor": {
"field": "product_count",
"factor": 50,
"modifier": "log1p"
}
},
{
"field_value_factor": {
"field": "rank_weight",
"modifier": "reciprocal",
"missing": 1
}
}
]
}
},
"aggs": {
"price": {
"filter": {
"bool": {
"filter": [
{
"nested": {
"path": "product",
"query": {
"bool": {
"filter": [
{
"term": {
"product.status_id": 1
}
},
{
"nested": {
"path": "product.category",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.category.id": [
870
]
}
}
]
}
}
}
},
{
"nested": {
"path": "product.brand",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.brand.id": [
22861
]
}
}
]
}
}
}
}
],
"must": [
{
"multi_match": {
"query": "television, audio, phone, dvd, tv, iphone",
"fields": [
"product.name^10",
"product.description^4",
"product.brand.name",
"product.brand.keywords_internal",
"product.brand.keywords_external"
],
"minimum_should_match": 0
}
}
]
}
}
}
}
]
}
},
"aggs": {
"priceMin": {
"min": {
"field": "price"
}
},
"priceMax": {
"max": {
"field": "price"
}
}
}
},
"brands": {
"global": {},
"aggs": {
"brands": {
"filter": {
"bool": {
"filter": [
{
"nested": {
"path": "product",
"query": {
"bool": {
"filter": [
{
"term": {
"product.status_id": 1
}
},
{
"nested": {
"path": "product.category",
"query": {
"bool": {
"filter": [
{
"terms": {
"product.category.id": [
870
]
}
}
]
}
}
}
}
],
"must": [
{
"multi_match": {
"query": "television, audio, phone, dvd, tv, iphone",
"fields": [
"product.name^10",
"product.description^4",
"product.brand.name",
"product.brand.keywords_internal",
"product.brand.keywords_external"
],
"minimum_should_match": 0
}
}
]
}
}
}
}
]
}
},
"aggs": {
"brands": {
"nested": {
"path": "product.brand"
},
"aggs": {
"brands": {
"terms": {
"field": "product.brand.id",
"size": 30
}
}
}
}
}
}
}
}
}
}
}
So is the bug with my query, or with ES (both?).
Both I would say. I opened #23290 in order to try to detect invalid queries at parsing time rather than relying on search-time validation, which gives cryptic error messages. I will close this issue now if you don't mind.
Elasticsearch version: 5.2.1
Plugins installed: []
JVM version: 1.8.0_121
OS version: Debian 8
Description of the problem including expected versus actual behavior:
Steps to reproduce:
Provide logs (if relevant):
[2017-02-21 02:48:41] log.INFO: Request Success: {"method":"POST","uri":"http://127.0.0.1:9200/product/product/_search?timeout=6s&from=0&size=40","headers":{"host":["127.0.0.1:9200"],"user-agent":["Guzzle/3.9.2 curl/7.38.0 PHP/7.0.16-1~dotdeb+8.1"],"content-length":[1570]},"HTTP code":200,"duration":0.080847} {"file":"/var/www/xxxx/library/Elasticsearch/Connections/AbstractConnection.php","line":131,"class":"Elasticsearch\Connections\AbstractConnection","function":"logRequestSuccess"}
[2017-02-21 02:48:42] log.WARNING: Request Failure: {"method":"POST","uri":"http://127.0.0.1:9200/product/product/_search?timeout=6s&from=0&size=40","headers":{"host":["127.0.0.1:9200"],"user-agent":["Guzzle/3.9.2 curl/7.38.0 PHP/7.0.16-1~dotdeb+8.1"],"content-length":[1570]},"HTTP code":500,"duration":0.08146,"error":"Server error response\n[status code] 500\n[reason phrase] Internal Server Error\n[url] http://127.0.0.1:9200/product/product/_search?timeout=6s&from=0&size=40"} {"file":"/var/www/xxxx/library/Elasticsearch/Connections/AbstractConnection.php","line":186,"class":"Elasticsearch\Connections\AbstractConnection","function":"logRequestFail"}
[2017-02-21 02:48:42] log.WARNING: Response ["{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer\"}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"query_fetch\",\"grouped\":true,\"failed_shards\":[{\"shard\":0,\"index\":\"product\",\"node\":\"ddjlYjROS8e3YTe4HT730A\",\"reason\":{\"type\":\"illegal_state_exception\",\"reason\":\"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer\"}}],\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer\"}},\"status\":500}"] {"file":"/var/www/xxxx/library/Elasticsearch/Connections/AbstractConnection.php","line":189,"class":"Elasticsearch\Connections\AbstractConnection","function":"logRequestFail"}
[2017-02-21 02:48:42] log.ERROR: 500 Server Exception: Server error response [status code] 500 [reason phrase] Internal Server Error [url] http://127.0.0.1:9200/product/product/_search?timeout=6s&from=0&size=40 {"error":{"root_cause":[{"type":"illegal_state_exception","reason":"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer"}],"type":"search_phase_execution_exception","reason":"all shards failed","phase":"query_fetch","grouped":true,"failed_shards":[{"shard":0,"index":"product","node":"ddjlYjROS8e3YTe4HT730A","reason":{"type":"illegal_state_exception","reason":"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer"}}],"caused_by":{"type":"illegal_state_exception","reason":"Child query must not match same docs with parent filter. Combine them as must clauses (+) to find a problem doc. docId=2147483647, class org.apache.lucene.search.ConjunctionScorer"}},"status":500} [] {"file":"/var/www/xxxx/library/Elasticsearch/Connections/GuzzleConnection.php","line":229,"class":"Elasticsearch\Connections\GuzzleConnection","function":"process5xxError"}
Query:
After the first few executions:
Then on the 3rd try: