remove or cutover all uses of now unsupported versions

Mike McCandless committed on 2016-03-02 09:05:41 -05:00
commit 09aa951ad0, parent c62ad7bba0
17 changed files with 28 additions and 296 deletions
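Every change below is the same move: master no longer supports 5.x match versions, so the Version.LUCENE_5_* constants and the branches they guarded go away. Condensed from the analyzer hunks that follow (a sketch of the pattern, not a compilable unit on its own):

    // Before: the extra filter was gated on the match version.
    TokenStream result = new LowerCaseFilter(source);
    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
      result = new DecimalDigitFilter(result);
    }

    // After: 5.x is unsupported, so the guard collapses to its "new" branch.
    TokenStream folded = new DecimalDigitFilter(new LowerCaseFilter(source));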


@@ -134,9 +134,7 @@ public final class ArabicAnalyzer extends StopwordAnalyzerBase {
   protected TokenStreamComponents createComponents(String fieldName) {
     final Tokenizer source = new StandardTokenizer();
     TokenStream result = new LowerCaseFilter(source);
-    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-      result = new DecimalDigitFilter(result);
-    }
+    result = new DecimalDigitFilter(result);
     // the order here is important: the stopword list is not normalized!
     result = new StopFilter(result, stopwords);
     // TODO maybe we should make ArabicNormalization filter also KeywordAttribute aware?!
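The same two-line collapse repeats in the Sorani, Persian, Hindi, and Thai analyzers below. In the style of the back-compat tests this commit deletes (a sketch of the surviving behavior, not code from the commit), DecimalDigitFilter now always folds Unicode decimal digits to ASCII:

    ArabicAnalyzer a = new ArabicAnalyzer();
    checkOneTerm(a, "١٢٣٤", "1234");  // Eastern Arabic digits fold to 0-9 unconditionally
    a.close();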


@@ -120,9 +120,7 @@ public final class SoraniAnalyzer extends StopwordAnalyzerBase {
     TokenStream result = new StandardFilter(source);
     result = new SoraniNormalizationFilter(result);
     result = new LowerCaseFilter(result);
-    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-      result = new DecimalDigitFilter(result);
-    }
+    result = new DecimalDigitFilter(result);
     result = new StopFilter(result, stopwords);
     if(!stemExclusionSet.isEmpty())
       result = new SetKeywordMarkerFilter(result, stemExclusionSet);


@@ -116,9 +116,7 @@ public final class PersianAnalyzer extends StopwordAnalyzerBase {
   protected TokenStreamComponents createComponents(String fieldName) {
     final Tokenizer source = new StandardTokenizer();
     TokenStream result = new LowerCaseFilter(source);
-    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-      result = new DecimalDigitFilter(result);
-    }
+    result = new DecimalDigitFilter(result);
     result = new ArabicNormalizationFilter(result);
     /* additional persian-specific normalization */
     result = new PersianNormalizationFilter(result);


@@ -117,9 +117,7 @@ public final class HindiAnalyzer extends StopwordAnalyzerBase {
   protected TokenStreamComponents createComponents(String fieldName) {
     final Tokenizer source = new StandardTokenizer();
     TokenStream result = new LowerCaseFilter(source);
-    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-      result = new DecimalDigitFilter(result);
-    }
+    result = new DecimalDigitFilter(result);
     if (!stemExclusionSet.isEmpty())
       result = new SetKeywordMarkerFilter(result, stemExclusionSet);
     result = new IndicNormalizationFilter(result);


@@ -98,9 +98,8 @@ public final class ThaiAnalyzer extends StopwordAnalyzerBase {
   protected TokenStreamComponents createComponents(String fieldName) {
     final Tokenizer source = new ThaiTokenizer();
     TokenStream result = new LowerCaseFilter(source);
-    if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-      result = new DecimalDigitFilter(result);
-    }
+    result = new DecimalDigitFilter(result);
     result = new StopFilter(result, stopwords);
     return new TokenStreamComponents(source, result);
   }


@@ -110,18 +110,6 @@ public class TestArabicAnalyzer extends BaseTokenStreamTestCase {
     a.close();
   }
-  /**
-   * test that we don't fold digits for back compat behavior
-   * @deprecated remove this test in lucene 7
-   */
-  @Deprecated
-  public void testDigitsBackCompat() throws Exception {
-    ArabicAnalyzer a = new ArabicAnalyzer();
-    a.setVersion(Version.LUCENE_5_3_0);
-    checkOneTerm(a, "١٢٣٤", "١٢٣٤");
-    a.close();
-  }
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     ArabicAnalyzer a = new ArabicAnalyzer();


@@ -74,18 +74,6 @@ public class TestSoraniAnalyzer extends BaseTokenStreamTestCase {
     a.close();
   }
-  /**
-   * test that we don't fold digits for back compat behavior
-   * @deprecated remove this test in lucene 7
-   */
-  @Deprecated
-  public void testDigitsBackCompat() throws Exception {
-    SoraniAnalyzer a = new SoraniAnalyzer();
-    a.setVersion(Version.LUCENE_5_3_0);
-    checkOneTerm(a, "١٢٣٤", "١٢٣٤");
-    a.close();
-  }
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new SoraniAnalyzer();


@@ -95,7 +95,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
   public void testFactoryHtmlStripClassicFolding() throws Exception {
     CustomAnalyzer a = CustomAnalyzer.builder()
-        .withDefaultMatchVersion(Version.LUCENE_5_0_0)
+        .withDefaultMatchVersion(Version.LUCENE_6_0_0)
         .addCharFilter(HTMLStripCharFilterFactory.class)
         .withTokenizer(ClassicTokenizerFactory.class)
         .addTokenFilter(ASCIIFoldingFilterFactory.class, "preserveOriginal", "true")
@@ -114,7 +114,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
     assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
     assertEquals(100, a.getPositionIncrementGap("dummy"));
     assertEquals(1000, a.getOffsetGap("dummy"));
-    assertSame(Version.LUCENE_5_0_0, a.getVersion());
+    assertSame(Version.LUCENE_6_0_0, a.getVersion());
     assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
         new String[] { "foo", "bar", "foo", "bar" },
@@ -127,7 +127,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
   public void testHtmlStripClassicFolding() throws Exception {
     CustomAnalyzer a = CustomAnalyzer.builder()
-        .withDefaultMatchVersion(Version.LUCENE_5_0_0)
+        .withDefaultMatchVersion(Version.LUCENE_6_0_0)
         .addCharFilter("htmlstrip")
         .withTokenizer("classic")
         .addTokenFilter("asciifolding", "preserveOriginal", "true")
@@ -146,7 +146,7 @@ public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
     assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
     assertEquals(100, a.getPositionIncrementGap("dummy"));
     assertEquals(1000, a.getOffsetGap("dummy"));
-    assertSame(Version.LUCENE_5_0_0, a.getVersion());
+    assertSame(Version.LUCENE_6_0_0, a.getVersion());
     assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
         new String[] { "foo", "bar", "foo", "bar" },


@@ -238,18 +238,6 @@ public class TestPersianAnalyzer extends BaseTokenStreamTestCase {
     a.close();
   }
-  /**
-   * test that we don't fold digits for back compat behavior
-   * @deprecated remove this test in lucene 7
-   */
-  @Deprecated
-  public void testDigitsBackCompat() throws Exception {
-    PersianAnalyzer a = new PersianAnalyzer();
-    a.setVersion(Version.LUCENE_5_3_0);
-    checkOneTerm(a, "۱۲۳۴", "۱۲۳۴");
-    a.close();
-  }
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     PersianAnalyzer a = new PersianAnalyzer();


@@ -57,18 +57,6 @@ public class TestHindiAnalyzer extends BaseTokenStreamTestCase {
     a.close();
   }
-  /**
-   * test that we don't fold digits for back compat behavior
-   * @deprecated remove this test in lucene 7
-   */
-  @Deprecated
-  public void testDigitsBackCompat() throws Exception {
-    HindiAnalyzer a = new HindiAnalyzer();
-    a.setVersion(Version.LUCENE_5_3_0);
-    checkOneTerm(a, "१२३४", "१२३४");
-    a.close();
-  }
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new HindiAnalyzer();


@@ -132,18 +132,6 @@ public class TestThaiAnalyzer extends BaseTokenStreamTestCase {
     a.close();
   }
-  /**
-   * test that we don't fold digits for back compat behavior
-   * @deprecated remove this test in lucene 7
-   */
-  @Deprecated
-  public void testDigitsBackCompat() throws Exception {
-    ThaiAnalyzer a = new ThaiAnalyzer();
-    a.setVersion(Version.LUCENE_5_3_0);
-    checkOneTerm(a, "๑๒๓๔", "๑๒๓๔");
-    a.close();
-  }
   public void testTwoSentences() throws Exception {
     Analyzer analyzer = new ThaiAnalyzer(CharArraySet.EMPTY_SET);
     assertAnalyzesTo(analyzer, "This is a test. การที่ได้ต้องแสดงว่างานดี",


@@ -1,107 +0,0 @@ (file deleted)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;
import java.io.InputStream;
import java.nio.file.Path;
import org.apache.lucene.document.Document;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
// LUCENE-6382
public class TestMaxPositionInOldIndex extends LuceneTestCase {
// Save this to BuildMaxPositionIndex.java and follow the compile/run instructions to regenerate the .zip:
/*
import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.BytesRef;
// Compile:
// javac -cp lucene/build/core/lucene-core-5.1.0-SNAPSHOT.jar:lucene/build/test-framework/lucene-test-framework-5.1.0-SNAPSHOT.jar:lucene/build/analysis/common/lucene-analyzers-common-5.1.0-SNAPSHOT.jar BuildMaxPositionIndex.java
// Run:
// java -cp .:lucene/build/core/lucene-core-5.1.0-SNAPSHOT.jar:lucene/build/test-framework/lucene-test-framework-5.1.0-SNAPSHOT.jar:lucene/build/analysis/common/lucene-analyzers-common-5.1.0-SNAPSHOT.jar:lucene/build/codecs/lucene-codecs-5.1.0-SNAPSHOT.jar BuildMaxPositionIndex
// cd maxposindex
// zip maxposindex.zip *
public class BuildMaxPositionIndex {
public static void main(String[] args) throws IOException {
Directory dir = FSDirectory.open(Paths.get("maxposindex"));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()));
Document doc = new Document();
// This is at position 1:
Token t1 = new Token("foo", 0, 3);
t1.setPositionIncrement(2);
Token t2 = new Token("foo", 4, 7);
// This overflows max position:
t2.setPositionIncrement(Integer.MAX_VALUE-1);
t2.setPayload(new BytesRef(new byte[] { 0x1 } ));
doc.add(new TextField("foo", new CannedTokenStream(new Token[] {t1, t2})));
iw.addDocument(doc);
iw.close();
dir.close();
}
}
*/
public void testCorruptIndex() throws Exception {
Path path = createTempDir("maxposindex");
InputStream resource = getClass().getResourceAsStream("maxposindex.zip");
assertNotNull("maxposindex not found", resource);
TestUtil.unzip(resource, path);
BaseDirectoryWrapper dir = newFSDirectory(path);
dir.setCheckIndexOnClose(false);
RuntimeException expected = expectThrows(RuntimeException.class, () -> {
TestUtil.checkIndex(dir, false, true);
});
assertTrue(expected.getMessage().contains("pos 2147483647 > IndexWriter.MAX_POSITION=2147483519"));
// Also confirm merging detects this:
IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setMergeScheduler(new SerialMergeScheduler());
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter w = new IndexWriter(dir, iwc);
w.addDocument(new Document());
CorruptIndexException expectedCorruption = expectThrows(CorruptIndexException.class, () -> {
w.forceMerge(1);
});
assertEquals(expectedCorruption.getMessage(), new CorruptIndexException(expectedCorruption.getOriginalMessage(), expectedCorruption.getResourceDescription()).getMessage());
// SerialMergeScheduler
assertTrue("got message " + expectedCorruption.getMessage(),
expectedCorruption.getMessage().contains("position=2147483647 is too large (> IndexWriter.MAX_POSITION=2147483519), field=\"foo\" doc=0 (resource=PerFieldPostings(segment=_0 formats=1)"));
w.close();
dir.close();
}
}


@@ -50,7 +50,7 @@ public class TestSegmentInfos extends LuceneTestCase {
     Codec codec = Codec.getDefault();
     SegmentInfos sis = new SegmentInfos();
-    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_5_0_0, "_0", 1, false, Codec.getDefault(),
+    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
                                        Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -59,7 +59,7 @@ public class TestSegmentInfos extends LuceneTestCase {
     sis.add(commitInfo);
     sis.commit(dir);
     sis = SegmentInfos.readLatestCommit(dir);
-    assertEquals(Version.LUCENE_5_0_0, sis.getMinSegmentLuceneVersion());
+    assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
     assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
     dir.close();
   }
@@ -72,14 +72,14 @@ public class TestSegmentInfos extends LuceneTestCase {
     Codec codec = Codec.getDefault();
     SegmentInfos sis = new SegmentInfos();
-    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_5_0_0, "_0", 1, false, Codec.getDefault(),
+    SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
                                        Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
     SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
     sis.add(commitInfo);
-    info = new SegmentInfo(dir, Version.LUCENE_5_1_0, "_1", 1, false, Codec.getDefault(),
+    info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_1", 1, false, Codec.getDefault(),
                            Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
     info.setFiles(Collections.<String>emptySet());
     codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -88,7 +88,7 @@ public class TestSegmentInfos extends LuceneTestCase {
     sis.commit(dir);
     sis = SegmentInfos.readLatestCommit(dir);
-    assertEquals(Version.LUCENE_5_0_0, sis.getMinSegmentLuceneVersion());
+    assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
     assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
     dir.close();
   }


@@ -32,25 +32,25 @@ public class TestVersion extends LuceneTestCase {
         assertTrue("LATEST must be always onOrAfter("+v+")", Version.LATEST.onOrAfter(v));
       }
     }
-    assertTrue(Version.LUCENE_6_0_0.onOrAfter(Version.LUCENE_5_0_0));;
+    assertTrue(Version.LUCENE_7_0_0.onOrAfter(Version.LUCENE_6_0_0));;
   }
 
   public void testToString() {
-    assertEquals("5.0.0", Version.LUCENE_5_0_0.toString());
     assertEquals("6.0.0", Version.LUCENE_6_0_0.toString());
+    assertEquals("7.0.0", Version.LUCENE_7_0_0.toString());
   }
 
   public void testParseLeniently() throws Exception {
-    assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("5.0"));
-    assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("5.0.0"));
-    assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_50"));
-    assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_5_0"));
-    assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_5_0_0"));
     assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("6.0"));
     assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("6.0.0"));
     assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_60"));
     assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_6_0"));
     assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_6_0_0"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("7.0"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("7.0.0"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_70"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_7_0"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_7_0_0"));
     assertEquals(Version.LATEST, Version.parseLeniently("LATEST"));
     assertEquals(Version.LATEST, Version.parseLeniently("latest"));
     assertEquals(Version.LATEST, Version.parseLeniently("LUCENE_CURRENT"));
@@ -95,7 +95,7 @@ public class TestVersion extends LuceneTestCase {
   public void testParse() throws Exception {
     assertEquals(Version.LUCENE_6_0_0, Version.parse("6.0.0"));
-    assertEquals(Version.LUCENE_5_0_0, Version.parse("5.0.0"));
+    assertEquals(Version.LUCENE_7_0_0, Version.parse("7.0.0"));
 
     // Version does not pass judgement on the major version:
     assertEquals(1, Version.parse("1.0").major);
@@ -103,7 +103,7 @@ public class TestVersion extends LuceneTestCase {
   }
 
   public void testForwardsCompatibility() throws Exception {
-    assertTrue(Version.parse("5.10.20").onOrAfter(Version.LUCENE_5_0_0));
+    assertTrue(Version.parse("6.10.20").onOrAfter(Version.LUCENE_6_0_0));
   }
 
   public void testParseExceptions() {
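The split the test exercises: parseLeniently accepts both dotted and legacy constant-name spellings, while strict parse takes only dotted notation. A minimal sketch mirroring assertions already in the test:

    assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_6_0"));
    assertEquals(Version.LUCENE_6_0_0, Version.parse("6.0.0"));
    // strict parse rejects constant-name forms:
    expectThrows(ParseException.class, () -> Version.parse("LUCENE_6_0"));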


@@ -371,15 +371,10 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
           // for distributed queries that don't include shards.qt, use the original path
           // as the default but operators need to update their luceneMatchVersion to enable
           // this behavior since it did not work this way prior to 5.1
-          if (req.getCore().getSolrConfig().luceneMatchVersion.onOrAfter(Version.LUCENE_5_1_0)) {
-            String reqPath = (String) req.getContext().get(PATH);
-            if (!"/select".equals(reqPath)) {
-              params.set(CommonParams.QT, reqPath);
-            } // else if path is /select, then the qt gets passed thru if set
-          } else {
-            // this is the pre-5.1 behavior, which translates to sending the shard request to /select
-            params.remove(CommonParams.QT);
-          }
+          String reqPath = (String) req.getContext().get(PATH);
+          if (!"/select".equals(reqPath)) {
+            params.set(CommonParams.QT, reqPath);
+          } // else if path is /select, then the qt gets passed thru if set
         }
         shardHandler1.submit(sreq, shard, params, rb.preferredHostAddress);
       }
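Net effect for distributed requests: the original request path now always becomes the shards.qt default unless the path is /select; the pre-5.1 fallback of stripping qt so shard requests went to /select is gone. A minimal sketch of the surviving logic, with a hypothetical path value:

    ModifiableSolrParams params = new ModifiableSolrParams();
    String reqPath = "/browse";             // hypothetical: original path from the request context
    if (!"/select".equals(reqPath)) {
      params.set(CommonParams.QT, reqPath); // shard request reuses the original handler path
    }
    // for "/select" itself, qt passes through only if explicitly set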


@@ -23,7 +23,6 @@ import org.junit.BeforeClass;
 /**
  * Tests per-field similarity support in the schema
- * @see TestPerFieldSimilarityClassic
  */
 public class TestPerFieldSimilarity extends BaseSimilarityTestCase {


@@ -1,86 +0,0 @@ (file deleted)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.similarities;
import org.apache.lucene.misc.SweetSpotSimilarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Version;
import org.junit.AfterClass;
import org.junit.BeforeClass;
/**
* Tests per-field similarity support in the schema when luceneMatchVersion indicates
* {@link ClassicSimilarity} should be the default.
* @see TestPerFieldSimilarity
*/
public class TestPerFieldSimilarityClassic extends BaseSimilarityTestCase {
@BeforeClass
public static void beforeClass() throws Exception {
// any value below 6.0 should have this behavior
System.setProperty("tests.luceneMatchVersion", Version.LUCENE_5_3_1.toString());
initCore("solrconfig-basic.xml","schema-sim.xml");
}
@AfterClass
public static void afterClass() throws Exception {
System.clearProperty("tests.luceneMatchVersion");
}
/** test a field where the sim is specified directly */
public void testDirect() throws Exception {
assertEquals(SweetSpotSimilarity.class, getSimilarity("sim1text").getClass());
}
/** ... and for a dynamic field */
public void testDirectDynamic() throws Exception {
assertEquals(SweetSpotSimilarity.class, getSimilarity("text_sim1").getClass());
}
/** test a field where a configurable sim factory is defined */
public void testFactory() throws Exception {
Similarity sim = getSimilarity("sim2text");
assertEquals(MockConfigurableSimilarity.class, sim.getClass());
assertEquals("is there an echo?", ((MockConfigurableSimilarity)sim).getPassthrough());
}
/** ... and for a dynamic field */
public void testFactoryDynamic() throws Exception {
Similarity sim = getSimilarity("text_sim2");
assertEquals(MockConfigurableSimilarity.class, sim.getClass());
assertEquals("is there an echo?", ((MockConfigurableSimilarity)sim).getPassthrough());
}
/** test a field where no similarity is specified */
public void testDefaults() throws Exception {
Similarity sim = getSimilarity("sim3text");
assertEquals(ClassicSimilarity.class, sim.getClass());;
}
/** ... and for a dynamic field */
public void testDefaultsDynamic() throws Exception {
Similarity sim = getSimilarity("text_sim3");
assertEquals(ClassicSimilarity.class, sim.getClass());
}
/** test a field that does not exist */
public void testNonexistent() throws Exception {
Similarity sim = getSimilarity("sdfdsfdsfdswr5fsdfdsfdsfs");
assertEquals(ClassicSimilarity.class, sim.getClass());
}
}