Upgrade to Lucene r1662607

Closes #9915.

Squashed commit of the following:

commit cfa59f5a3f03d9d1b432980dcee6495447c1e7ea
Author: Robert Muir <rmuir@apache.org>
Date:   Fri Feb 27 12:10:16 2015 -0500

    add missing null check

commit 62fe5403068c730c0e0b6fd1ab1a0246eeef6220
Author: Robert Muir <rmuir@apache.org>
Date:   Fri Feb 27 11:31:53 2015 -0500

    Disable ExtrasFS for now, until we hook all this in properly in a separate issue.

commit 822795c57c5cf846423fad443c2327c4ed0094ac
Author: Adrien Grand <jpountz@gmail.com>
Date:   Fri Feb 27 10:12:02 2015 +0100

    Fix PercolatorTests.

commit 98b2a0a7d8298648125c9a367cb7e31b3ec7d51b
Author: Adrien Grand <jpountz@gmail.com>
Date:   Fri Feb 27 09:27:11 2015 +0100

    Fix ChildrenQueryTests.

commit 9b99656fc56bbd01c9afe22baffae3c37bb48a71
Author: Robert Muir <rmuir@apache.org>
Date:   Thu Feb 26 20:50:02 2015 -0500

    cutover apis, no work on test failures yet.
Robert Muir, 2015-02-27 12:50:46 -05:00
parent efbda318d0, commit 4ca57ab10a
21 changed files with 89 additions and 214 deletions


@@ -32,7 +32,7 @@
     <properties>
         <lucene.version>5.1.0</lucene.version>
-        <lucene.maven.version>5.1.0-snapshot-1660560</lucene.maven.version>
+        <lucene.maven.version>5.1.0-snapshot-1662607</lucene.maven.version>
         <tests.jvms>auto</tests.jvms>
         <tests.shuffle>true</tests.shuffle>
         <tests.output>onerror</tests.output>
@@ -56,7 +56,7 @@
         <repository>
             <id>lucene-snapshots</id>
             <name>Lucene Snapshots</name>
-            <url>https://download.elasticsearch.org/lucenesnapshots/1660560</url>
+            <url>https://download.elasticsearch.org/lucenesnapshots/1662607</url>
         </repository>
     </repositories>


@@ -61,7 +61,14 @@ public class XPostingsHighlighter {
     // unnecessary.
 
     /** for rewriting: we don't want slow processing from MTQs */
-    private static final IndexReader EMPTY_INDEXREADER = new MultiReader();
+    private static final IndexReader EMPTY_INDEXREADER;
+    static {
+        try {
+            EMPTY_INDEXREADER = new MultiReader();
+        } catch (IOException bogus) {
+            throw new RuntimeException(bogus);
+        }
+    }
 
     /** Default maximum content size to process. Typically snippets
      * closer to the beginning of the document better summarize its content */

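Note: the initializer moves into a static block because, in this Lucene snapshot, the MultiReader constructor declares a checked IOException. A minimal sketch of the general idiom, with all names illustrative rather than taken from the change:

import java.io.IOException;

class StaticInitIdiom {
    // Assign a static final field whose construction declares a checked
    // exception inside a static block, rethrowing it unchecked.
    private static final Object EMPTY;

    static {
        try {
            EMPTY = create();
        } catch (IOException bogus) {
            // cannot realistically happen for a purely in-memory object
            throw new RuntimeException(bogus);
        }
    }

    private static Object create() throws IOException {
        return new Object();
    }
}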

@@ -705,22 +705,6 @@ public class Lucene {
             public int nextDoc() throws IOException {
                 throw new ElasticsearchIllegalStateException(message);
             }
-            @Override
-            public int nextPosition() throws IOException {
-                throw new ElasticsearchIllegalStateException(message);
-            }
-            @Override
-            public int startOffset() throws IOException {
-                throw new ElasticsearchIllegalStateException(message);
-            }
-            @Override
-            public int endOffset() throws IOException {
-                throw new ElasticsearchIllegalStateException(message);
-            }
-            @Override
-            public BytesRef getPayload() throws IOException {
-                throw new ElasticsearchIllegalStateException(message);
-            }
         };
     }


@@ -24,6 +24,8 @@ import org.apache.lucene.index.FilterLeafReader;
 import org.apache.lucene.index.LeafReader;
 import org.elasticsearch.index.shard.ShardId;
 
+import java.io.IOException;
+
 /**
  * A {@link org.apache.lucene.index.FilterDirectoryReader} that exposes
  * Elasticsearch internal per shard / index information like the shard ID.
@@ -33,7 +35,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
     private final ShardId shardId;
     private final FilterDirectoryReader.SubReaderWrapper wrapper;
 
-    private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) {
+    private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) throws IOException {
         super(in, wrapper);
         this.wrapper = wrapper;
         this.shardId = shardId;
@@ -47,7 +49,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
     }
 
     @Override
-    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
+    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
         return new ElasticsearchDirectoryReader(in, wrapper, shardId);
     }
@@ -59,7 +61,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
      * @param reader the reader to wrap
      * @param shardId the shard ID to expose via the elasticsearch internal reader wrappers.
      */
-    public static ElasticsearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) {
+    public static ElasticsearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) throws IOException {
         return new ElasticsearchDirectoryReader(reader, new SubReaderWrapper(shardId), shardId);
     }

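Note: FilterDirectoryReader construction declares IOException in this snapshot, so wrap(...) and every caller now has to handle or propagate the checked exception. A minimal caller sketch, assuming the import path; the helper name and the exception translation are illustrative, not part of this change:

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.index.shard.ShardId;

class WrapReaderExample {
    // Hypothetical helper: callers either declare IOException or translate it.
    static DirectoryReader wrapOrThrow(DirectoryReader reader, ShardId shardId) {
        try {
            return ElasticsearchDirectoryReader.wrap(reader, shardId);
        } catch (IOException e) {
            throw new RuntimeException("failed to wrap directory reader", e);
        }
    }
}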

@@ -65,24 +65,4 @@ public class EmptyScorer extends Scorer {
     public long cost() {
         return 0;
     }
-
-    @Override
-    public int nextPosition() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int startOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int endOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public BytesRef getPayload() throws IOException {
-        return null;
-    }
 }


@@ -82,28 +82,6 @@ abstract class CustomBoostFactorScorer extends Scorer {
         return scorer.cost();
     }
-
-    @Override
-    public int nextPosition() throws IOException {
-        return scorer.nextPosition();
-    }
-
-    @Override
-    public int startOffset() throws IOException {
-        return scorer.startOffset();
-    }
-
-    @Override
-    public int endOffset() throws IOException {
-        return scorer.endOffset();
-    }
-
-    @Override
-    public BytesRef getPayload() throws IOException {
-        return scorer.getPayload();
-    }
 
     public interface NextDoc {
         public int advance(int target) throws IOException;


@@ -65,26 +65,6 @@ public class ScriptScoreFunction extends ScoreFunction {
             throw new UnsupportedOperationException();
         }
-
-        @Override
-        public int nextPosition() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int startOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int endOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public BytesRef getPayload() throws IOException {
-            return null;
-        }
 
         @Override
         public long cost() {
             return 1;


@@ -87,7 +87,9 @@ public class ChildrenConstantScoreQuery extends Query {
 
     @Override
     public void extractTerms(Set<Term> terms) {
-        rewrittenChildQuery.extractTerms(terms);
+        if (rewrittenChildQuery != null) {
+            rewrittenChildQuery.extractTerms(terms);
+        }
     }
 
     @Override


@@ -394,6 +394,11 @@ public class ChildrenQuery extends Query {
             this.scores = this.bigArrays.newFloatArray(512, false);
         }
 
+        @Override
+        public boolean needsScores() {
+            return true;
+        }
+
         @Override
         protected void newParent(long parentIdx) throws IOException {
             scores = bigArrays.grow(scores, parentIdx + 1);
@@ -642,26 +647,6 @@
         public long cost() {
             return parentsIterator.cost();
         }
-
-        @Override
-        public int nextPosition() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int startOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int endOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public BytesRef getPayload() throws IOException {
-            return null;
-        }
     }
 
     private static class ParentCountScorer extends ParentScorer {


@@ -74,24 +74,4 @@ public class ConstantScorer extends Scorer {
     public long cost() {
         return docIdSetIterator.cost();
     }
-
-    @Override
-    public int nextPosition() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int startOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int endOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public BytesRef getPayload() throws IOException {
-        return null;
-    }
 }


@@ -344,25 +344,5 @@ public class ParentQuery extends Query {
         public long cost() {
             return childrenIterator.cost();
         }
-
-        @Override
-        public int nextPosition() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int startOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int endOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public BytesRef getPayload() throws IOException {
-            return null;
-        }
     }
 }


@@ -416,26 +416,6 @@ public class TopChildrenQuery extends Query {
         public final long cost() {
             return docs.length;
         }
-
-        @Override
-        public int nextPosition() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int startOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public int endOffset() throws IOException {
-            return -1;
-        }
-
-        @Override
-        public BytesRef getPayload() throws IOException {
-            return null;
-        }
     }
 
     private static class ParentDocComparator implements Comparator<ParentDoc> {


@@ -235,26 +235,6 @@ public class IncludeNestedDocsQuery extends Query {
             return parentScorer.freq();
         }
-
-        @Override
-        public int nextPosition() throws IOException {
-            return parentScorer.nextPosition();
-        }
-
-        @Override
-        public int startOffset() throws IOException {
-            return parentScorer.startOffset();
-        }
-
-        @Override
-        public int endOffset() throws IOException {
-            return parentScorer.endOffset();
-        }
-
-        @Override
-        public BytesRef getPayload() throws IOException {
-            return parentScorer.getPayload();
-        }
 
         @Override
         public int docID() {
             return currentDoc;


@@ -68,8 +68,8 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex {
                 Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
                 memoryIndices[i] = indexDoc(d, analyzer, memoryIndex).createSearcher().getIndexReader();
             }
-            MultiReader mReader = new MultiReader(memoryIndices, true);
             try {
+                MultiReader mReader = new MultiReader(memoryIndices, true);
                 LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader);
                 DocSearcher docSearcher = new DocSearcher(new IndexSearcher(slowReader), rootDocMemoryIndex);
                 context.initialize(docSearcher, parsedDocument);


@@ -223,6 +223,11 @@ abstract class QueryCollector extends SimpleCollector {
             topDocsCollector = TopScoreDocCollector.create(context.size());
         }
 
+        @Override
+        public boolean needsScores() {
+            return super.needsScores() || topDocsCollector.needsScores();
+        }
+
         @Override
         public void collect(int doc) throws IOException {
             final Query query = getQuery(doc);
@@ -286,6 +291,11 @@
             this.highlightPhase = highlightPhase;
         }
 
+        @Override
+        public boolean needsScores() {
+            return true;
+        }
+
         @Override
         public void collect(int doc) throws IOException {
             final Query query = getQuery(doc);

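Note: in this snapshot, Collector exposes needsScores(); a collector that calls Scorer.score() inside collect() must return true from it, otherwise the searcher is free to skip score computation. A minimal, self-contained sketch of the pattern; the class name and the summing logic are illustrative, not the percolator code:

import java.io.IOException;

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;

// A collector that consumes scores advertises it via needsScores().
class ScoreSummingCollector extends SimpleCollector {
    private Scorer scorer;
    float sum;

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        this.scorer = scorer;
    }

    @Override
    public void collect(int doc) throws IOException {
        sum += scorer.score(); // only meaningful because needsScores() returns true
    }

    @Override
    public boolean needsScores() {
        return true;
    }
}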

@@ -20,6 +20,7 @@
 package org.elasticsearch.search.lookup;
 
 import org.apache.lucene.index.*;
+import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
@@ -51,8 +52,6 @@ public class IndexFieldTerm implements Iterable<TermPosition> {
     private final TermStatistics termStats;
 
-    static private EmptyScorer EMPTY_SCORER = new EmptyScorer(null);
-
     // get the document frequency of the term
     public long df() throws IOException {
         return termStats.docFreq();
@@ -129,7 +128,53 @@ public class IndexFieldTerm implements Iterable<TermPosition> {
             }
 
             if (postings == null) {
-                postings = EMPTY_SCORER;
+                final DocIdSetIterator empty = DocIdSetIterator.empty();
+                postings = new PostingsEnum() {
+                    @Override
+                    public int docID() {
+                        return empty.docID();
+                    }
+
+                    @Override
+                    public int nextDoc() throws IOException {
+                        return empty.nextDoc();
+                    }
+
+                    @Override
+                    public int advance(int target) throws IOException {
+                        return empty.advance(target);
+                    }
+
+                    @Override
+                    public long cost() {
+                        return empty.cost();
+                    }
+
+                    @Override
+                    public int freq() throws IOException {
+                        return 1;
+                    }
+
+                    @Override
+                    public int nextPosition() throws IOException {
+                        return -1;
+                    }
+
+                    @Override
+                    public int startOffset() throws IOException {
+                        return -1;
+                    }
+
+                    @Override
+                    public int endOffset() throws IOException {
+                        return -1;
+                    }
+
+                    @Override
+                    public BytesRef getPayload() throws IOException {
+                        return null;
+                    }
+                };
             }
         } catch (IOException e) {

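Note: callers drive the postings through the usual PostingsEnum loop; the empty stand-in above exhausts on the first nextDoc(), so no special casing is needed. A small, hedged consumer sketch; the method and its counting logic are illustrative, not taken from IndexFieldTerm:

import java.io.IOException;

import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.search.DocIdSetIterator;

class PostingsLoopExample {
    // Counts positions across all documents of a PostingsEnum; the empty
    // stand-in returns NO_MORE_DOCS immediately, so it yields 0.
    static long countPositions(PostingsEnum postings) throws IOException {
        long count = 0;
        while (postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            int freq = postings.freq();
            for (int i = 0; i < freq; i++) {
                postings.nextPosition(); // -1 when positions were not indexed
                count++;
            }
        }
        return count;
    }
}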

@@ -99,24 +99,4 @@ class MockScorer extends Scorer {
     public long cost() {
         return 0;
     }
-
-    @Override
-    public int nextPosition() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int startOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public int endOffset() throws IOException {
-        return -1;
-    }
-
-    @Override
-    public BytesRef getPayload() throws IOException {
-        return null;
-    }
 }


@@ -261,13 +261,13 @@ public class RandomExceptionCircuitBreakerTests extends ElasticsearchIntegration
         }
 
-        public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) {
+        public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) throws IOException {
             super(in, new ThrowingSubReaderWrapper(settings));
             this.settings = settings;
         }
 
         @Override
-        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
+        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
             return new RandomExceptionDirectoryReaderWrapper(in, settings);
         }
     }


@@ -359,13 +359,13 @@ public class SearchWithRandomExceptionsTests extends ElasticsearchIntegrationTes
             }
         }
 
-        public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) {
+        public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) throws IOException {
            super(in, new ThrowingSubReaderWrapper(settings));
            this.settings = settings;
        }
 
        @Override
-        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
+        protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
            return new RandomExceptionDirectoryReaderWrapper(in, settings);
        }
    }


@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.*;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
 import org.apache.lucene.util.TimeUnits;
 import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
@@ -38,6 +39,7 @@ import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
 @ThreadLeakLingering(linger = 5000) // 5 sec lingering
 @TimeoutSuite(millis = TimeUnits.HOUR)
 @LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
+@SuppressFileSystems("ExtrasFS") // we aren't ready for this yet.
 public abstract class ElasticsearchLuceneTestCase extends LuceneTestCase {
 
     private static final Codec DEFAULT_CODEC = Codec.getDefault();


@@ -162,7 +162,7 @@ public class MockInternalEngine extends InternalEngine {
     public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader {
         protected final SubReaderWrapper subReaderWrapper;
 
-        public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) {
+        public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) throws IOException {
            super(in, subReaderWrapper);
            this.subReaderWrapper = subReaderWrapper;
        }