Upgrade to Lucene r1662607

Closes #9915.

Squashed commit of the following:

commit cfa59f5a3f03d9d1b432980dcee6495447c1e7ea
Author: Robert Muir <rmuir@apache.org>
Date:   Fri Feb 27 12:10:16 2015 -0500

    add missing null check

commit 62fe5403068c730c0e0b6fd1ab1a0246eeef6220
Author: Robert Muir <rmuir@apache.org>
Date:   Fri Feb 27 11:31:53 2015 -0500

    Disable ExtrasFS for now, until we hook all this in properly in a separate issue.

commit 822795c57c5cf846423fad443c2327c4ed0094ac
Author: Adrien Grand <jpountz@gmail.com>
Date:   Fri Feb 27 10:12:02 2015 +0100

    Fix PercolatorTests.

commit 98b2a0a7d8298648125c9a367cb7e31b3ec7d51b
Author: Adrien Grand <jpountz@gmail.com>
Date:   Fri Feb 27 09:27:11 2015 +0100

    Fix ChildrenQueryTests.

commit 9b99656fc56bbd01c9afe22baffae3c37bb48a71
Author: Robert Muir <rmuir@apache.org>
Date:   Thu Feb 26 20:50:02 2015 -0500

    cutover apis, no work on test failures yet.
Author: Robert Muir
Date:   2015-02-27 12:50:46 -05:00
parent efbda318d0
commit 4ca57ab10a
21 changed files with 89 additions and 214 deletions

View File

@@ -32,7 +32,7 @@
<properties>
<lucene.version>5.1.0</lucene.version>
<lucene.maven.version>5.1.0-snapshot-1660560</lucene.maven.version>
<lucene.maven.version>5.1.0-snapshot-1662607</lucene.maven.version>
<tests.jvms>auto</tests.jvms>
<tests.shuffle>true</tests.shuffle>
<tests.output>onerror</tests.output>
@@ -56,7 +56,7 @@
<repository>
<id>lucene-snapshots</id>
<name>Lucene Snapshots</name>
<url>https://download.elasticsearch.org/lucenesnapshots/1660560</url>
<url>https://download.elasticsearch.org/lucenesnapshots/1662607</url>
</repository>
</repositories>

View File

@@ -61,7 +61,14 @@ public class XPostingsHighlighter {
// unnecessary.
/** for rewriting: we don't want slow processing from MTQs */
private static final IndexReader EMPTY_INDEXREADER = new MultiReader();
private static final IndexReader EMPTY_INDEXREADER;
static {
try {
EMPTY_INDEXREADER = new MultiReader();
} catch (IOException bogus) {
throw new RuntimeException(bogus);
}
}
/** Default maximum content size to process. Typically snippets
* closer to the beginning of the document better summarize its content */
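Note on the change above: in this Lucene snapshot the MultiReader constructor evidently declares IOException, so the empty reader can no longer be assigned inline and moves into a static initializer. A minimal sketch of the same initialize-or-fail pattern (illustrative class and field names, not code from this commit):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiReader;

    // Illustrative holder class; mirrors the static-initializer pattern used above.
    final class EmptyReaderHolder {
        static final IndexReader EMPTY;
        static {
            try {
                EMPTY = new MultiReader(); // constructor declares IOException in this snapshot
            } catch (IOException bogus) {
                // an empty reader cannot realistically fail to open; surface as unchecked just in case
                throw new RuntimeException(bogus);
            }
        }
        private EmptyReaderHolder() {}
    }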

View File

@@ -705,22 +705,6 @@ public class Lucene {
public int nextDoc() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int nextPosition() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int startOffset() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public int endOffset() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
@Override
public BytesRef getPayload() throws IOException {
throw new ElasticsearchIllegalStateException(message);
}
};
}
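The overrides removed here (and from the scorer classes further down) reflect that position, offset and payload access no longer hangs off these iterators in this Lucene snapshot; that surface lives on PostingsEnum instead, so an "always throws" iterator only needs the basic docID/iteration methods. A hedged sketch of what such an iterator reduces to, using a plain IllegalStateException in place of Elasticsearch's own exception type:

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;

    // Sketch only: refuses every operation, mirroring the anonymous class trimmed above.
    final class FailingIterator extends DocIdSetIterator {
        private final String message;

        FailingIterator(String message) {
            this.message = message;
        }

        @Override public int docID() { throw new IllegalStateException(message); }
        @Override public int nextDoc() throws IOException { throw new IllegalStateException(message); }
        @Override public int advance(int target) throws IOException { throw new IllegalStateException(message); }
        @Override public long cost() { throw new IllegalStateException(message); }
    }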

View File

@@ -24,6 +24,8 @@ import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.LeafReader;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
/**
* A {@link org.apache.lucene.index.FilterDirectoryReader} that exposes
* Elasticsearch internal per shard / index information like the shard ID.
@@ -33,7 +35,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
private final ShardId shardId;
private final FilterDirectoryReader.SubReaderWrapper wrapper;
private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) {
private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) throws IOException {
super(in, wrapper);
this.wrapper = wrapper;
this.shardId = shardId;
@@ -47,7 +49,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new ElasticsearchDirectoryReader(in, wrapper, shardId);
}
@@ -59,7 +61,7 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
* @param reader the reader to wrap
* @param shardId the shard ID to expose via the elasticsearch internal reader wrappers.
*/
public static ElasticsearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) {
public static ElasticsearchDirectoryReader wrap(DirectoryReader reader, ShardId shardId) throws IOException {
return new ElasticsearchDirectoryReader(reader, new SubReaderWrapper(shardId), shardId);
}
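Since wrap() and the constructor now declare IOException, callers have to propagate or handle it. A usage sketch under assumed imports and setup (the package of ElasticsearchDirectoryReader and the surrounding code are illustrative, not taken from this commit):

    import java.io.IOException;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.store.Directory;
    import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; // assumed package
    import org.elasticsearch.index.shard.ShardId;

    final class WrapExample {
        // wrap() now declares IOException, so callers propagate it (or clean up and rethrow).
        static DirectoryReader openWrapped(Directory dir, ShardId shardId) throws IOException {
            DirectoryReader reader = DirectoryReader.open(dir);
            try {
                return ElasticsearchDirectoryReader.wrap(reader, shardId);
            } catch (IOException e) {
                reader.close(); // do not leak the inner reader if wrapping fails
                throw e;
            }
        }
    }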

View File

@@ -65,24 +65,4 @@ public class EmptyScorer extends Scorer {
public long cost() {
return 0;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
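With the position, offset and payload overrides gone, a scorer that matches nothing reduces to the iteration and scoring surface alone. A self-contained, hedged sketch (illustrative class name, not the EmptyScorer source itself):

    import java.io.IOException;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    // Sketch of a scorer that matches no documents; only docID/nextDoc/advance,
    // freq/score and cost remain to be implemented after this upgrade.
    final class NoMatchScorer extends Scorer {
        private int doc = -1;

        NoMatchScorer(Weight weight) {
            super(weight);
        }

        @Override public int docID() { return doc; }
        @Override public int nextDoc() throws IOException { return doc = NO_MORE_DOCS; }
        @Override public int advance(int target) throws IOException { return doc = NO_MORE_DOCS; }
        @Override public int freq() throws IOException { return 0; }
        @Override public float score() throws IOException { return 0f; }
        @Override public long cost() { return 0; }
    }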

View File

@@ -82,28 +82,6 @@ abstract class CustomBoostFactorScorer extends Scorer {
return scorer.cost();
}
@Override
public int nextPosition() throws IOException {
return scorer.nextPosition();
}
@Override
public int startOffset() throws IOException {
return scorer.startOffset();
}
@Override
public int endOffset() throws IOException {
return scorer.endOffset();
}
@Override
public BytesRef getPayload() throws IOException {
return scorer.getPayload();
}
public interface NextDoc {
public int advance(int target) throws IOException;

View File

@@ -65,26 +65,6 @@ public class ScriptScoreFunction extends ScoreFunction {
throw new UnsupportedOperationException();
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
@Override
public long cost() {
return 1;

View File

@@ -87,7 +87,9 @@ public class ChildrenConstantScoreQuery extends Query {
@Override
public void extractTerms(Set<Term> terms) {
rewrittenChildQuery.extractTerms(terms);
if (rewrittenChildQuery != null) {
rewrittenChildQuery.extractTerms(terms);
}
}
@Override

View File

@@ -394,6 +394,11 @@ public class ChildrenQuery extends Query {
this.scores = this.bigArrays.newFloatArray(512, false);
}
@Override
public boolean needsScores() {
return true;
}
@Override
protected void newParent(long parentIdx) throws IOException {
scores = bigArrays.grow(scores, parentIdx + 1);
@@ -642,26 +647,6 @@
public long cost() {
return parentsIterator.cost();
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
private static class ParentCountScorer extends ParentScorer {

View File

@@ -74,24 +74,4 @@ public class ConstantScorer extends Scorer {
public long cost() {
return docIdSetIterator.cost();
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}

View File

@@ -344,25 +344,5 @@ public class ParentQuery extends Query {
public long cost() {
return childrenIterator.cost();
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
}

View File

@@ -416,26 +416,6 @@ public class TopChildrenQuery extends Query {
public final long cost() {
return docs.length;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}
private static class ParentDocComparator implements Comparator<ParentDoc> {

View File

@@ -235,26 +235,6 @@ public class IncludeNestedDocsQuery extends Query {
return parentScorer.freq();
}
@Override
public int nextPosition() throws IOException {
return parentScorer.nextPosition();
}
@Override
public int startOffset() throws IOException {
return parentScorer.startOffset();
}
@Override
public int endOffset() throws IOException {
return parentScorer.endOffset();
}
@Override
public BytesRef getPayload() throws IOException {
return parentScorer.getPayload();
}
@Override
public int docID() {
return currentDoc;

View File

@@ -68,8 +68,8 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex {
Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
memoryIndices[i] = indexDoc(d, analyzer, memoryIndex).createSearcher().getIndexReader();
}
MultiReader mReader = new MultiReader(memoryIndices, true);
try {
MultiReader mReader = new MultiReader(memoryIndices, true);
LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader);
DocSearcher docSearcher = new DocSearcher(new IndexSearcher(slowReader), rootDocMemoryIndex);
context.initialize(docSearcher, parsedDocument);

View File

@@ -223,6 +223,11 @@ abstract class QueryCollector extends SimpleCollector {
topDocsCollector = TopScoreDocCollector.create(context.size());
}
@Override
public boolean needsScores() {
return super.needsScores() || topDocsCollector.needsScores();
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
@@ -286,6 +291,11 @@ abstract class QueryCollector extends SimpleCollector {
this.highlightPhase = highlightPhase;
}
@Override
public boolean needsScores() {
return true;
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
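The needsScores() overrides added above suggest that collectors in this snapshot are asked up front whether scores must be computed; a collector that calls scorer.score() has to answer true. A minimal hedged sketch of such a collector (illustrative, not the percolator code):

    import java.io.IOException;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.SimpleCollector;

    // Illustrative collector that tracks the best score seen; because it calls
    // scorer.score(), needsScores() must return true.
    final class MaxScoreCollector extends SimpleCollector {
        private Scorer scorer;
        private float maxScore = Float.NEGATIVE_INFINITY;

        @Override
        public boolean needsScores() {
            return true;
        }

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            this.scorer = scorer;
        }

        @Override
        public void collect(int doc) throws IOException {
            maxScore = Math.max(maxScore, scorer.score());
        }

        float maxScore() {
            return maxScore;
        }
    }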

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@@ -51,8 +52,6 @@ public class IndexFieldTerm implements Iterable<TermPosition> {
private final TermStatistics termStats;
static private EmptyScorer EMPTY_SCORER = new EmptyScorer(null);
// get the document frequency of the term
public long df() throws IOException {
return termStats.docFreq();
@@ -129,7 +128,53 @@ public class IndexFieldTerm implements Iterable<TermPosition> {
}
if (postings == null) {
postings = EMPTY_SCORER;
final DocIdSetIterator empty = DocIdSetIterator.empty();
postings = new PostingsEnum() {
@Override
public int docID() {
return empty.docID();
}
@Override
public int nextDoc() throws IOException {
return empty.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return empty.advance(target);
}
@Override
public long cost() {
return empty.cost();
}
@Override
public int freq() throws IOException {
return 1;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
};
}
} catch (IOException e) {
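The anonymous PostingsEnum added above stands in for an absent term: it iterates no documents while still satisfying the PostingsEnum contract. For context, a hedged sketch of how such an enum is typically consumed; with the empty stand-in the outer loop simply never runs:

    import java.io.IOException;
    import org.apache.lucene.index.PostingsEnum;
    import org.apache.lucene.search.DocIdSetIterator;

    final class PostingsWalker {
        // Counts all positions across all documents of a postings enum.
        static long countPositions(PostingsEnum postings) throws IOException {
            long count = 0;
            for (int doc = postings.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = postings.nextDoc()) {
                final int freq = postings.freq();
                for (int i = 0; i < freq; i++) {
                    postings.nextPosition();
                    count++;
                }
            }
            return count;
        }
    }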

View File

@@ -99,24 +99,4 @@ class MockScorer extends Scorer {
public long cost() {
return 0;
}
@Override
public int nextPosition() throws IOException {
return -1;
}
@Override
public int startOffset() throws IOException {
return -1;
}
@Override
public int endOffset() throws IOException {
return -1;
}
@Override
public BytesRef getPayload() throws IOException {
return null;
}
}

View File

@@ -261,13 +261,13 @@ public class RandomExceptionCircuitBreakerTests extends ElasticsearchIntegration
}
public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) {
public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) throws IOException {
super(in, new ThrowingSubReaderWrapper(settings));
this.settings = settings;
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new RandomExceptionDirectoryReaderWrapper(in, settings);
}
}

View File

@@ -359,13 +359,13 @@ public class SearchWithRandomExceptionsTests extends ElasticsearchIntegrationTes
}
}
public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) {
public RandomExceptionDirectoryReaderWrapper(DirectoryReader in, Settings settings) throws IOException {
super(in, new ThrowingSubReaderWrapper(settings));
this.settings = settings;
}
@Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) {
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new RandomExceptionDirectoryReaderWrapper(in, settings);
}
}

View File

@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.*;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
@@ -38,6 +39,7 @@ import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
@ThreadLeakLingering(linger = 5000) // 5 sec lingering
@TimeoutSuite(millis = TimeUnits.HOUR)
@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
@SuppressFileSystems("ExtrasFS") // we aren't ready for this yet.
public abstract class ElasticsearchLuceneTestCase extends LuceneTestCase {
private static final Codec DEFAULT_CODEC = Codec.getDefault();

View File

@@ -162,7 +162,7 @@ public class MockInternalEngine extends InternalEngine {
public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader {
protected final SubReaderWrapper subReaderWrapper;
public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) {
public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) throws IOException {
super(in, subReaderWrapper);
this.subReaderWrapper = subReaderWrapper;
}