Search: Sending a request that fails to parse can cause file leaks, closes #270.

This commit is contained in:
kimchy 2010-07-21 16:59:58 +03:00
parent 0cb97e4044
commit 8ec7ee66a9
2 changed files with 35 additions and 31 deletions

View File

@@ -286,38 +286,43 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
private SearchContext createContext(InternalSearchRequest request) throws ElasticSearchException { private SearchContext createContext(InternalSearchRequest request) throws ElasticSearchException {
IndexService indexService = indicesService.indexServiceSafe(request.index()); IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(request.shardId()); IndexShard indexShard = indexService.shardSafe(request.shardId());
Engine.Searcher engineSearcher = indexShard.searcher();
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.state().nodes().localNodeId(), request.index(), request.shardId()); SearchShardTarget shardTarget = new SearchShardTarget(clusterService.state().nodes().localNodeId(), request.index(), request.shardId());
Engine.Searcher engineSearcher = indexShard.searcher();
SearchContext context = new SearchContext(idGenerator.incrementAndGet(), shardTarget, request.numberOfShards(), request.timeout(), request.types(), engineSearcher, indexService, scriptService); SearchContext context = new SearchContext(idGenerator.incrementAndGet(), shardTarget, request.numberOfShards(), request.timeout(), request.types(), engineSearcher, indexService, scriptService);
context.scroll(request.scroll()); try {
context.scroll(request.scroll());
parseSource(context, request.source(), request.sourceOffset(), request.sourceLength()); parseSource(context, request.source(), request.sourceOffset(), request.sourceLength());
parseSource(context, request.extraSource(), request.extraSourceOffset(), request.extraSourceLength()); parseSource(context, request.extraSource(), request.extraSourceOffset(), request.extraSourceLength());
// if the from and size are still not set, default them // if the from and size are still not set, default them
if (context.from() == -1) { if (context.from() == -1) {
context.from(0); context.from(0);
}
if (context.size() == -1) {
context.size(10);
}
// pre process
dfsPhase.preProcess(context);
queryPhase.preProcess(context);
fetchPhase.preProcess(context);
// compute the context keep alive
TimeValue keepAlive = defaultKeepAlive;
if (request.scroll() != null && request.scroll().keepAlive() != null) {
keepAlive = request.scroll().keepAlive();
}
context.keepAlive(keepAlive);
context.accessed(timerService.estimatedTimeInMillis());
context.keepAliveTimeout(timerService.newTimeout(new KeepAliveTimerTask(context), keepAlive, TimerService.ExecutionType.DEFAULT));
} catch (RuntimeException e) {
context.release();
throw e;
} }
if (context.size() == -1) {
context.size(10);
}
// pre process
dfsPhase.preProcess(context);
queryPhase.preProcess(context);
fetchPhase.preProcess(context);
// compute the context keep alive
TimeValue keepAlive = defaultKeepAlive;
if (request.scroll() != null && request.scroll().keepAlive() != null) {
keepAlive = request.scroll().keepAlive();
}
context.keepAlive(keepAlive);
context.accessed(timerService.estimatedTimeInMillis());
context.keepAliveTimeout(timerService.newTimeout(new KeepAliveTimerTask(context), keepAlive, TimerService.ExecutionType.DEFAULT));
return context; return context;
} }

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.search.internal;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.timer.Timeout; import org.elasticsearch.common.timer.Timeout;
@@ -45,7 +44,6 @@ import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResult;
import java.io.IOException;
import java.util.List; import java.util.List;
/** /**
@@ -136,15 +134,16 @@ public class SearchContext implements Releasable {
} }
@Override public boolean release() throws ElasticSearchException { @Override public boolean release() throws ElasticSearchException {
// we should close this searcher, since its a new one we create each time, and we use the IndexReader
try { try {
searcher.close(); searcher.close();
} catch (IOException e) { } catch (Exception e) {
// ignore this exception // ignore any exception here
} catch (AlreadyClosedException e) {
// ignore this as well
} }
engineSearcher.release(); engineSearcher.release();
keepAliveTimeout.cancel(); if (keepAliveTimeout != null) {
keepAliveTimeout.cancel();
}
return true; return true;
} }