fix wrong handling of doc ids to load when request is not serialized
This commit is contained in:
parent cd08a71fae
commit 9549b9c2d3
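Note: the bug this commit fixes only bites when the fetch request never crosses the wire. Over a real transport, FetchSearchRequest.readFrom() rebuilds the docIds array at exactly `size` entries, so iterating over the array length was accidentally correct; in-process, the request hands over the collector's reusable buffer, whose length can exceed the number of valid ids. A minimal, self-contained sketch of the two paths (all names below are hypothetical stand-ins, not the real classes):

    import java.util.Arrays;

    public class DocIdsSketch {
        public static void main(String[] args) {
            int[] buffer = {7, 11, 13, 0, 0, 0, 0, 0}; // 3 real doc ids in an 8-slot buffer
            int count = 3;

            // Serialized path: the array is rebuilt at exactly `count` entries,
            // so a loop bounded by docIds.length happens to be correct.
            int[] overTheWire = Arrays.copyOf(buffer, count);
            System.out.println(overTheWire.length); // 3

            // Local, non-serialized path: the raw buffer is handed over as-is,
            // so a length-bounded loop would fetch 5 bogus doc ids.
            System.out.println(buffer.length); // 8 -- the wrong handling being fixed

            // The fix threads an explicit count (docIdsSize()) through the
            // request and the SearchContext instead of trusting array length.
        }
    }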
@@ -66,6 +66,9 @@ task explodedDist(dependsOn: [configurations.distLib], description: 'Builds a mi
         include 'README.textile'
     }

+    ant.delete { fileset(dir: explodedDistLibDir, includes: "$archivesBaseName-*-javadoc.jar") }
+    ant.delete { fileset(dir: explodedDistLibDir, includes: "$archivesBaseName-*-sources.jar") }
+
     ant.chmod(dir: "$explodedDistDir/bin", perm: "ugo+rx", includes: "**/*")
 }

@@ -59,7 +59,6 @@ import org.elasticsearch.util.timer.Timeout;
 import org.elasticsearch.util.timer.TimerTask;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
@@ -260,7 +259,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
     public FetchSearchResult executeFetchPhase(FetchSearchRequest request) throws ElasticSearchException {
         SearchContext context = findContext(request.id());
         try {
-            context.docIdsToLoad(request.docIds());
+            context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
             fetchPhase.execute(context);
             if (context.scroll() == null) {
                 freeContext(request.id());
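The call-site change above is the heart of the fix; side by side (both lines are from the hunk):

    // old: trusts the array length -- wrong for in-process (non-serialized) requests
    context.docIdsToLoad(request.docIds());
    // new: carries the explicit count alongside the (possibly oversized) array
    context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());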
@@ -366,7 +365,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
         TopDocs topDocs = context.queryResult().topDocs();
         if (topDocs.scoreDocs.length < context.from()) {
             // no more docs...
-            context.docIdsToLoad(EMPTY_DOC_IDS);
+            context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
             return;
         }
         int totalSize = context.from() + context.size();
@@ -380,10 +379,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
             }
             counter++;
         }
-        if (counter < context.size()) {
-            docIdsToLoad = Arrays.copyOfRange(docIdsToLoad, 0, counter);
-        }
-        context.docIdsToLoad(docIdsToLoad);
+        context.docIdsToLoad(docIdsToLoad, 0, counter);
     }

     private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
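With the explicit (array, offset, count) contract, the Arrays.copyOfRange trim that previously right-sized the buffer is dead weight: the callee never reads past `counter` anyway, so the copy only cost an allocation. A small hypothetical demo of the equivalence:

    import java.util.Arrays;

    public class TrimSketch {
        public static void main(String[] args) {
            int[] docIdsToLoad = new int[10]; // buffer sized for from + size
            docIdsToLoad[0] = 42;
            int counter = 1;                  // only one doc actually collected

            // Old approach: allocate a copy so that length == valid count.
            int[] trimmed = Arrays.copyOfRange(docIdsToLoad, 0, counter);
            System.out.println(trimmed.length);           // 1

            // New approach: no copy; (docIdsToLoad, 0, counter) tells the
            // fetch phase exactly how many leading entries are valid.
            System.out.println(docIdsToLoad[counter - 1]); // 42, same data
        }
    }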
@@ -139,7 +139,7 @@ public class SearchPhaseController {
         for (ShardDoc shardDoc : shardDocs) {
             ExtTIntArrayList list = result.get(shardDoc.shardTarget());
             if (list == null) {
-                list = new ExtTIntArrayList();
+                list = new ExtTIntArrayList(); // can't be shared!, uses unsafe on it later on
                 result.put(shardDoc.shardTarget(), list);
             }
             list.add(shardDoc.docId());
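The new comment on ExtTIntArrayList is load-bearing: downstream code takes the trove list's backing array without a defensive copy (the "unsafe" accessor the comment alludes to), so two shard targets must never share one list instance. A hypothetical stand-in showing the aliasing hazard a shared list would create:

    public class SharedListSketch {
        // Stand-in for ExtTIntArrayList: grows an int buffer, exposes it raw.
        static class IntList {
            int[] data = new int[4];
            int size;
            void add(int v) { data[size++] = v; }
            int[] unsafeArray() { return data; } // no copy -- that's the point
        }

        public static void main(String[] args) {
            IntList shared = new IntList();
            shared.add(1);                          // doc id meant for shard A
            int[] handedToShardA = shared.unsafeArray();
            shared.add(2);                          // doc id meant for shard B
            System.out.println(handedToShardA[1]);  // 2 -- shard A now sees B's id
        }
    }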
@@ -65,9 +65,10 @@ public class FetchPhase implements SearchPhase {
     public void execute(SearchContext context) {
         FieldSelector fieldSelector = buildFieldSelectors(context);

-        InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoad().length];
+        InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
         int index = 0;
-        for (int docId : context.docIdsToLoad()) {
+        for (int docIdIdx = context.docIdsToLoadFrom(); docIdIdx < context.docIdsToLoadSize(); docIdIdx++) {
+            int docId = context.docIdsToLoad()[docIdIdx];
             Document doc = loadDocument(context, fieldSelector, docId);
             Uid uid = extractUid(context, doc);

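One caveat worth flagging: the rewritten loop reads (from, size) as the half-open interval [from, size) rather than [from, from + size). That is harmless here because every call site in this commit passes 0 for `from`, but a defensive version would spell out the upper bound, e.g. (hypothetical helper, not part of the commit):

    public class FetchLoopSketch {
        // Explicit-bound variant of the FetchPhase loop above.
        static void fetchAll(int[] docIds, int from, int size) {
            for (int i = from; i < from + size; i++) {
                int docId = docIds[i];
                System.out.println(docId);  // stand-in for loadDocument(...)
            }
        }

        public static void main(String[] args) {
            fetchAll(new int[]{7, 11, 13, 0, 0}, 0, 3); // prints 7, 11, 13
        }
    }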
@@ -27,7 +27,7 @@ import org.elasticsearch.util.trove.ExtTIntArrayList;
 import java.io.IOException;

 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public class FetchSearchRequest implements Streamable {

@@ -35,7 +35,7 @@ public class FetchSearchRequest implements Streamable {

     private int[] docIds;

-    private transient int size;
+    private int size;

     public FetchSearchRequest() {
     }
@@ -60,11 +60,15 @@ public class FetchSearchRequest implements Streamable {
         return docIds;
     }

+    public int docIdsSize() {
+        return size;
+    }
+
     @Override public void readFrom(StreamInput in) throws IOException {
         id = in.readLong();
         size = in.readVInt();
         docIds = new int[size];
-        for (int i = 0; i < docIds.length; i++) {
+        for (int i = 0; i < size; i++) {
             docIds[i] = in.readVInt();
         }
     }

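Two things happen in this file: `size` loses its `transient` marker (the keyword only affects java.io serialization, which Streamable's hand-rolled readFrom/writeTo never uses, so it was misleading at best), and docIdsSize() exposes the count so callers stop relying on docIds.length. The loop-bound change in readFrom is cosmetic on the wire path, since the array is freshly allocated at `size`; the count only diverges from the length on the in-process path. A self-contained round-trip sketch of that asymmetry (plain DataStreams as hypothetical stand-ins for StreamInput/StreamOutput):

    import java.io.*;

    public class SizeRoundTripSketch {
        public static void main(String[] args) throws IOException {
            int[] buffer = {7, 11, 13, 0, 0}; // oversized in-process buffer
            int size = 3;

            // writeTo-style encoding: the count, then exactly `size` ids.
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeInt(size);
            for (int i = 0; i < size; i++) out.writeInt(buffer[i]);

            // readFrom-style decoding: the rebuilt array is exactly `size` long.
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            int n = in.readInt();
            int[] docIds = new int[n];
            for (int i = 0; i < n; i++) docIds[i] = in.readInt();

            System.out.println(docIds.length + " on the wire vs " + buffer.length + " in-process");
        }
    }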
@@ -90,6 +90,10 @@ public class SearchContext implements Releasable {

     private int[] docIdsToLoad;

+    private int docsIdsToLoadFrom;
+
+    private int docsIdsToLoadSize;
+
     private SearchContextFacets facets;

     private SearchContextHighlight highlight;
@@ -295,8 +299,18 @@ public class SearchContext implements Releasable {
         return docIdsToLoad;
     }

-    public SearchContext docIdsToLoad(int[] docIdsToLoad) {
+    public int docIdsToLoadFrom() {
+        return docsIdsToLoadFrom;
+    }
+
+    public int docIdsToLoadSize() {
+        return docsIdsToLoadSize;
+    }
+
+    public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) {
         this.docIdsToLoad = docIdsToLoad;
+        this.docsIdsToLoadFrom = docsIdsToLoadFrom;
+        this.docsIdsToLoadSize = docsIdsToLoadSize;
         return this;
     }

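Taken together, SearchContext is now the single source of truth for which slice of the doc-id array is valid. The intended call pattern, collected from the hunks above:

    // producers (SearchService):
    context.docIdsToLoad(request.docIds(), 0, request.docIdsSize()); // fetch path
    context.docIdsToLoad(docIdsToLoad, 0, counter);                  // query path
    context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);                       // no more docs

    // consumer (FetchPhase):
    for (int i = context.docIdsToLoadFrom(); i < context.docIdsToLoadSize(); i++) {
        int docId = context.docIdsToLoad()[i];
        // ... load and convert the document
    }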