ClassCastException during percolation query, closes #1905.

Shay Banon 2012-05-03 17:57:20 +03:00
parent 8db27cc5bc
commit aeae380258
6 changed files with 55 additions and 36 deletions
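
Note on the root cause: the percolator matches each incoming document against the registered queries using a Lucene MemoryIndex, and the reader it exposes is not a SegmentReader. The bloom, field data, filter and id caches below all cast the incoming reader to SegmentReader unconditionally in order to register a core-closed listener, which is where the ClassCastException was thrown. A minimal, self-contained illustration of the mismatch, assuming the Lucene 3.x APIs in use at the time (the class and variable names here are illustrative only, not Elasticsearch code):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.memory.MemoryIndex;

public class MemoryReaderCastDemo {
    public static void main(String[] args) {
        // the percolator builds a MemoryIndex per document it percolates
        MemoryIndex memoryIndex = new MemoryIndex();
        IndexReader reader = memoryIndex.createSearcher().getIndexReader();

        System.out.println(reader instanceof SegmentReader); // prints false
        // The pre-fix caches cast unconditionally, so the equivalent of the
        // following line is what failed with a ClassCastException:
        // ((SegmentReader) reader).addCoreClosedListener(...);
    }
}

Each changed cache below applies the same fix: register the listener only when the reader really is a SegmentReader, and rely on an explicit clear (see the PercolatorExecutor change) for any other reader.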

View File

@@ -123,7 +123,9 @@ public class SimpleBloomCache extends AbstractIndexComponent implements BloomCac
             synchronized (creationMutex) {
                 fieldCache = cache.get(reader.getCoreCacheKey());
                 if (fieldCache == null) {
-                    ((SegmentReader) reader).addCoreClosedListener(this);
+                    if (reader instanceof SegmentReader) {
+                        ((SegmentReader) reader).addCoreClosedListener(this);
+                    }
                     fieldCache = ConcurrentCollections.newConcurrentMap();
                     cache.put(reader.getCoreCacheKey(), fieldCache);
                 }
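
The same two-line guard recurs in the field data, filter and id caches below. Factored out, the shape is roughly the following; this is a sketch only, assuming Lucene 3.x's SegmentReader.CoreClosedListener interface (which these cache classes implement), and registerIfSegmentReader is a hypothetical helper, not something this commit adds:

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SegmentReader;

final class CoreClosedListeners {

    private CoreClosedListeners() {
    }

    // Registers the listener only when the reader actually supports core-closed callbacks.
    static boolean registerIfSegmentReader(IndexReader reader, SegmentReader.CoreClosedListener listener) {
        if (reader instanceof SegmentReader) {
            ((SegmentReader) reader).addCoreClosedListener(listener);
            return true;
        }
        // e.g. the percolator's MemoryIndex reader: no callback is available, so the
        // caller is responsible for clearing its cache entry explicitly
        return false;
    }
}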

View File

@@ -112,7 +112,9 @@ public abstract class AbstractConcurrentMapFieldDataCache extends AbstractIndexC
                 fieldDataCache = cache.get(reader.getCoreCacheKey());
                 if (fieldDataCache == null) {
                     fieldDataCache = buildFieldDataMap();
-                    ((SegmentReader) reader).addCoreClosedListener(this);
+                    if (reader instanceof SegmentReader) {
+                        ((SegmentReader) reader).addCoreClosedListener(this);
+                    }
                     cache.put(reader.getCoreCacheKey(), fieldDataCache);
                 }
             }

View File

@@ -166,7 +166,7 @@ public class WeightedFilterCache extends AbstractIndexComponent implements Filte
             if (cacheValue == null) {
                 if (!cache.seenReaders.containsKey(reader.getCoreCacheKey())) {
                     Boolean previous = cache.seenReaders.putIfAbsent(reader.getCoreCacheKey(), Boolean.TRUE);
-                    if (previous == null) {
+                    if (previous == null && (reader instanceof SegmentReader)) {
                         ((SegmentReader) reader).addCoreClosedListener(cache);
                         cache.seenReadersCount.inc();
                     }

View File

@@ -109,7 +109,9 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                     continue;
                 }
 
-                ((SegmentReader) reader).addCoreClosedListener(this);
+                if (reader instanceof SegmentReader) {
+                    ((SegmentReader) reader).addCoreClosedListener(this);
+                }
                 HashMap<String, TypeBuilder> readerBuilder = new HashMap<String, TypeBuilder>();
                 builders.put(reader.getCoreCacheKey(), readerBuilder);

View File

@@ -249,7 +249,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {
         }
     }
 
-    public synchronized void addQuery(String name, Query query) {
+    private synchronized void addQuery(String name, Query query) {
         Preconditions.checkArgument(query != null, "query must be provided for percolate request");
         this.queries = MapBuilder.newMapBuilder(queries).put(name, query).immutableMap();
     }
@@ -349,43 +349,46 @@ public class PercolatorExecutor extends AbstractIndexComponent {
         }
 
         final IndexSearcher searcher = memoryIndex.createSearcher();
         List<String> matches = new ArrayList<String>();
-        if (request.query() == null) {
-            Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
-            for (Map.Entry<String, Query> entry : queries.entrySet()) {
-                collector.reset();
-                try {
-                    searcher.search(entry.getValue(), collector);
-                } catch (IOException e) {
-                    logger.warn("[" + entry.getKey() + "] failed to execute query", e);
-                }
-                if (collector.exists()) {
-                    matches.add(entry.getKey());
-                }
-            }
-        } else {
-            IndexService percolatorIndex = indicesService.indexService(PercolatorService.INDEX_NAME);
-            if (percolatorIndex == null) {
-                throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
-            }
-            if (percolatorIndex.numberOfShards() == 0) {
-                throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
-            }
-            IndexShard percolatorShard = percolatorIndex.shard(0);
-            Engine.Searcher percolatorSearcher = percolatorShard.searcher();
-            try {
-                percolatorSearcher.searcher().search(request.query(), new QueryCollector(logger, queries, searcher, percolatorIndex, matches));
-            } catch (IOException e) {
-                logger.warn("failed to execute", e);
-            } finally {
-                percolatorSearcher.release();
-            }
-        }
-        indexCache.clear(searcher.getIndexReader());
+        try {
+            if (request.query() == null) {
+                Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
+                for (Map.Entry<String, Query> entry : queries.entrySet()) {
+                    collector.reset();
+                    try {
+                        searcher.search(entry.getValue(), collector);
+                    } catch (IOException e) {
+                        logger.warn("[" + entry.getKey() + "] failed to execute query", e);
+                    }
+                    if (collector.exists()) {
+                        matches.add(entry.getKey());
+                    }
+                }
+            } else {
+                IndexService percolatorIndex = indicesService.indexService(PercolatorService.INDEX_NAME);
+                if (percolatorIndex == null) {
+                    throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
+                }
+                if (percolatorIndex.numberOfShards() == 0) {
+                    throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
+                }
+                IndexShard percolatorShard = percolatorIndex.shard(0);
+                Engine.Searcher percolatorSearcher = percolatorShard.searcher();
+                try {
+                    percolatorSearcher.searcher().search(request.query(), new QueryCollector(logger, queries, searcher, percolatorIndex, matches));
+                } catch (IOException e) {
+                    logger.warn("failed to execute", e);
+                } finally {
+                    percolatorSearcher.release();
+                }
+            }
+        } finally {
+            // explicitly clear the reader, since we can only register on callback on SegmentReader
+            indexCache.clear(searcher.getIndexReader());
+        }
         return new Response(matches, request.doc().mappersAdded());
     }
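
Most of the PercolatorExecutor hunk above is re-indentation; the behavioural change is that indexCache.clear(...) now runs in a finally block. Since no core-closed listener can be registered for the MemoryIndex reader, this explicit clear is the only eviction path for its cache entries, and it now happens even when a query fails. Condensed to its shape (a sketch, with runQueries as a hypothetical stand-in for the two branches shown in the hunk):

final IndexSearcher searcher = memoryIndex.createSearcher();
List<String> matches = new ArrayList<String>();
try {
    runQueries(searcher, matches);   // hypothetical stand-in for the if/else branches above
} finally {
    // no SegmentReader callback exists for this reader, so clear explicitly,
    // whether or not the queries succeeded
    indexCache.clear(searcher.getIndexReader());
}
return new Response(matches, request.doc().mappersAdded());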

View File

@@ -36,6 +36,7 @@ import org.elasticsearch.index.cache.IndexCacheModule;
 import org.elasticsearch.index.engine.IndexEngineModule;
 import org.elasticsearch.index.mapper.MapperServiceModule;
 import org.elasticsearch.index.percolator.PercolatorExecutor;
+import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.index.query.IndexQueryParserModule;
 import org.elasticsearch.index.settings.IndexSettingsModule;
 import org.elasticsearch.index.similarity.SimilarityModule;
@@ -45,6 +46,7 @@ import org.elasticsearch.threadpool.ThreadPoolModule;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.*;
@@ -60,7 +62,7 @@ public class PercolatorExecutorTests {
     @BeforeClass
     public void buildPercolatorService() {
         Settings settings = ImmutableSettings.settingsBuilder()
-                .put("index.cache.filter.type", "none")
+                //.put("index.cache.filter.type", "none")
                 .build();
         Index index = new Index("test");
         Injector injector = new ModulesBuilder().add(
@@ -128,5 +130,13 @@ public class PercolatorExecutorTests {
         percolate = percolatorExecutor.percolate(new PercolatorExecutor.SourceRequest("type1", source));
         assertThat(percolate.matches(), hasSize(1));
         assertThat(percolate.matches(), hasItems("test1"));
+
+        // add a range query (cached)
+        // add a query
+        percolatorExecutor.addQuery("test1", constantScoreQuery(FilterBuilders.rangeFilter("field2").from("value").includeLower(true)));
+
+        percolate = percolatorExecutor.percolate(new PercolatorExecutor.SourceRequest("type1", source));
+        assertThat(percolate.matches(), hasSize(1));
+        assertThat(percolate.matches(), hasItem("test1"));
     }
 }