mirror of https://github.com/apache/lucene.git

LUCENE-7753: Make fields static when possible.

parent 602cce304c
commit 30dc73adc0
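
In short: instance fields whose values never vary per instance are promoted to static final constants, and fields promoted to constants are renamed to CONSTANT_CASE. A minimal sketch of the pattern (hypothetical class and field names, not taken from the diff below):

// Before: every test instance carries its own copy of an immutable value.
public class SomeLuceneTest extends LuceneTestCase {
  private final String fieldName = "content";
}

// After: the value is a single class-level constant shared by all instances.
public class SomeLuceneTest extends LuceneTestCase {
  private static final String FIELD_NAME = "content";
}
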
@@ -80,6 +80,9 @@ Other

 * LUCENE-7681: MemoryIndex uses new DocValues API (Alan Woodward)

+* LUCENE-7753: Make fields static when possible.
+  (Daniel Jelinski via Adrien Grand)
+
 ======================= Lucene 6.6.0 =======================

 Other

@@ -61,7 +61,7 @@ public class EnwikiContentSourceTest extends LuceneTestCase {
 });
 }

-private final String PAGE1 =
+private static final String PAGE1 =
 " <page>\r\n" +
 " <title>Title1</title>\r\n" +
 " <ns>0</ns>\r\n" +

@@ -80,7 +80,7 @@ public class EnwikiContentSourceTest extends LuceneTestCase {
 " </revision>\r\n" +
 " </page>\r\n";

-private final String PAGE2 =
+private static final String PAGE2 =
 " <page>\r\n" +
 " <title>Title2</title>\r\n" +
 " <ns>0</ns>\r\n" +

@@ -38,8 +38,8 @@ import org.apache.lucene.util.TestUtil;
 *
 */
 public class TestCustomNorms extends LuceneTestCase {
-final String floatTestField = "normsTestFloat";
-final String exceptionTestField = "normsTestExcp";
+static final String FLOAT_TEST_FIELD = "normsTestFloat";
+static final String EXCEPTION_TEST_FIELD = "normsTestExcp";

 public void testFloatNorms() throws IOException {

@@ -57,11 +57,11 @@ public class TestCustomNorms extends LuceneTestCase {
 Document doc = docs.nextDoc();
 int boost = TestUtil.nextInt(random(), 1, 10);
 String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
-Field f = new TextField(floatTestField, value, Field.Store.YES);
+Field f = new TextField(FLOAT_TEST_FIELD, value, Field.Store.YES);

 doc.add(f);
 writer.addDocument(doc);
-doc.removeField(floatTestField);
+doc.removeField(FLOAT_TEST_FIELD);
 if (rarely()) {
 writer.commit();
 }

@@ -69,11 +69,11 @@ public class TestCustomNorms extends LuceneTestCase {
 writer.commit();
 writer.close();
 DirectoryReader open = DirectoryReader.open(dir);
-NumericDocValues norms = MultiDocValues.getNormValues(open, floatTestField);
+NumericDocValues norms = MultiDocValues.getNormValues(open, FLOAT_TEST_FIELD);
 assertNotNull(norms);
 for (int i = 0; i < open.maxDoc(); i++) {
 Document document = open.document(i);
-int expected = Integer.parseInt(document.get(floatTestField).split(" ")[0]);
+int expected = Integer.parseInt(document.get(FLOAT_TEST_FIELD).split(" ")[0]);
 assertEquals(i, norms.nextDoc());
 assertEquals(expected, norms.longValue());
 }

@@ -87,7 +87,7 @@ public class TestCustomNorms extends LuceneTestCase {

 @Override
 public Similarity get(String field) {
-if (floatTestField.equals(field)) {
+if (FLOAT_TEST_FIELD.equals(field)) {
 return new FloatEncodingBoostSimilarity();
 } else {
 return delegate;

@@ -48,7 +48,7 @@ import org.apache.lucene.util.TestUtil;
 @SuppressCodecs({ "Memory", "Direct", "SimpleText" })
 @Slow
 public class TestNorms extends LuceneTestCase {
-final String byteTestField = "normsTestByte";
+static final String BYTE_TEST_FIELD = "normsTestByte";

 static class CustomNormEncodingSimilarity extends TFIDFSimilarity {

@@ -115,11 +115,11 @@ public class TestNorms extends LuceneTestCase {
 Directory dir = newFSDirectory(createTempDir("TestNorms.testMaxByteNorms"));
 buildIndex(dir);
 DirectoryReader open = DirectoryReader.open(dir);
-NumericDocValues normValues = MultiDocValues.getNormValues(open, byteTestField);
+NumericDocValues normValues = MultiDocValues.getNormValues(open, BYTE_TEST_FIELD);
 assertNotNull(normValues);
 for (int i = 0; i < open.maxDoc(); i++) {
 Document document = open.document(i);
-int expected = Integer.parseInt(document.get(byteTestField).split(" ")[0]);
+int expected = Integer.parseInt(document.get(BYTE_TEST_FIELD).split(" ")[0]);
 assertEquals(i, normValues.nextDoc());
 assertEquals(expected, normValues.longValue());
 }

@@ -143,10 +143,10 @@ public class TestNorms extends LuceneTestCase {
 Document doc = docs.nextDoc();
 int boost = TestUtil.nextInt(random, 1, 255);
 String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
-Field f = new TextField(byteTestField, value, Field.Store.YES);
+Field f = new TextField(BYTE_TEST_FIELD, value, Field.Store.YES);
 doc.add(f);
 writer.addDocument(doc);
-doc.removeField(byteTestField);
+doc.removeField(BYTE_TEST_FIELD);
 if (rarely()) {
 writer.commit();
 }

@@ -162,7 +162,7 @@ public class TestNorms extends LuceneTestCase {

 @Override
 public Similarity get(String field) {
-if (byteTestField.equals(field)) {
+if (BYTE_TEST_FIELD.equals(field)) {
 return new ByteEncodingBoostSimilarity();
 } else {
 return delegate;

@@ -64,7 +64,7 @@ final class BugReproTokenStream extends TokenStream {
 private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
 private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
 private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
-private final int tokenCount = 4;
+private static final int TOKEN_COUNT = 4;
 private int nextTokenIndex = 0;
 private final String terms[] = new String[]{"six", "six", "drunken", "drunken"};
 private final int starts[] = new int[]{0, 0, 4, 4};

@@ -73,7 +73,7 @@ final class BugReproTokenStream extends TokenStream {

 @Override
 public boolean incrementToken() {
-if (nextTokenIndex < tokenCount) {
+if (nextTokenIndex < TOKEN_COUNT) {
 termAtt.setEmpty().append(terms[nextTokenIndex]);
 offsetAtt.setOffset(starts[nextTokenIndex], ends[nextTokenIndex]);
 posIncAtt.setPositionIncrement(incs[nextTokenIndex]);

@@ -49,7 +49,7 @@ public class TestAutomatonQuery extends LuceneTestCase {
 private IndexReader reader;
 private IndexSearcher searcher;

-private final String FN = "field";
+private static final String FN = "field";

 @Override
 public void setUp() throws Exception {

@@ -39,7 +39,7 @@ public class TestAutomatonQueryUnicode extends LuceneTestCase {
 private IndexSearcher searcher;
 private Directory directory;

-private final String FN = "field";
+private static final String FN = "field";

 @Override
 public void setUp() throws Exception {

@@ -35,14 +35,6 @@ import org.apache.lucene.index.QueryTimeout;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -57,7 +49,7 @@ public class TestEarlyTerminatingSortingCollector extends LuceneTestCase {
 private final Sort sort = new Sort(new SortField("ndv1", SortField.Type.LONG));
 private RandomIndexWriter iw;
 private IndexReader reader;
-private final int forceMergeMaxSegmentCount = 5;
+private static final int FORCE_MERGE_MAX_SEGMENT_COUNT = 5;

 private Document randomDocument() {
 final Document doc = new Document();

@@ -107,7 +99,7 @@ public class TestEarlyTerminatingSortingCollector extends LuceneTestCase {
 iw.forceMerge(1);
 }
 else if (random().nextBoolean()) {
-iw.forceMerge(forceMergeMaxSegmentCount);
+iw.forceMerge(FORCE_MERGE_MAX_SEGMENT_COUNT);
 }
 reader = iw.getReader();
 }

@@ -42,7 +42,7 @@ public class TestRegexpQuery extends LuceneTestCase {
 private IndexSearcher searcher;
 private IndexReader reader;
 private Directory directory;
-private final String FN = "field";
+private static final String FN = "field";

 @Override
 public void setUp() throws Exception {

@@ -32,7 +32,7 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 @SuppressCodecs({ "SimpleText", "Memory", "Direct" })
 public class TestSearchWithThreads extends LuceneTestCase {
 int NUM_DOCS;
-final int NUM_SEARCH_THREADS = 5;
+static final int NUM_SEARCH_THREADS = 5;
 int RUN_TIME_MSEC;

 @Override

@@ -49,7 +49,7 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
 }

 // add enough document so that the index will be larger than RAMDirectory.READ_BUFFER_SIZE
-private final int docsToAdd = 500;
+private static final int DOCS_TO_ADD = 500;

 private Path buildIndex() throws IOException {
 Path path = createTempDir("buildIndex");

@@ -59,12 +59,12 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
 new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
 // add some documents
 Document doc = null;
-for (int i = 0; i < docsToAdd; i++) {
+for (int i = 0; i < DOCS_TO_ADD; i++) {
 doc = new Document();
 doc.add(newStringField("content", English.intToEnglish(i).trim(), Field.Store.YES));
 writer.addDocument(doc);
 }
-assertEquals(docsToAdd, writer.maxDoc());
+assertEquals(DOCS_TO_ADD, writer.maxDoc());
 writer.close();
 dir.close();

@@ -100,13 +100,13 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {

 // open reader to test document count
 IndexReader reader = DirectoryReader.open(ramDir);
-assertEquals(docsToAdd, reader.numDocs());
+assertEquals(DOCS_TO_ADD, reader.numDocs());

 // open search zo check if all doc's are there
 IndexSearcher searcher = newSearcher(reader);

 // search for all documents
-for (int i = 0; i < docsToAdd; i++) {
+for (int i = 0; i < DOCS_TO_ADD; i++) {
 Document doc = searcher.doc(i);
 assertTrue(doc.getField("content") != null);
 }

@@ -115,8 +115,8 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
 reader.close();
 }

-private final int numThreads = 10;
-private final int docsPerThread = 40;
+private static final int NUM_THREADS = 10;
+private static final int DOCS_PER_THREAD = 40;

 public void testRAMDirectorySize() throws IOException, InterruptedException {

@@ -132,15 +132,15 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {

 assertEquals(ramDir.sizeInBytes(), ramDir.getRecomputedSizeInBytes());

-Thread[] threads = new Thread[numThreads];
-for (int i=0; i<numThreads; i++) {
+Thread[] threads = new Thread[NUM_THREADS];
+for (int i = 0; i< NUM_THREADS; i++) {
 final int num = i;
 threads[i] = new Thread(){
 @Override
 public void run() {
-for (int j=1; j<docsPerThread; j++) {
+for (int j = 1; j< DOCS_PER_THREAD; j++) {
 Document doc = new Document();
-doc.add(newStringField("sizeContent", English.intToEnglish(num*docsPerThread+j).trim(), Field.Store.YES));
+doc.add(newStringField("sizeContent", English.intToEnglish(num* DOCS_PER_THREAD +j).trim(), Field.Store.YES));
 try {
 writer.addDocument(doc);
 } catch (IOException e) {

@@ -150,10 +150,10 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
 }
 };
 }
-for (int i=0; i<numThreads; i++) {
+for (int i = 0; i< NUM_THREADS; i++) {
 threads[i].start();
 }
-for (int i=0; i<numThreads; i++) {
+for (int i = 0; i< NUM_THREADS; i++) {
 threads[i].join();
 }

@@ -59,8 +59,8 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {

 private final static NullComparator nullComparator = new NullComparator();

-private final String groupField = "author";
-private final String countField = "publisher";
+private static final String GROUP_FIELD = "author";
+private static final String COUNT_FIELD = "publisher";

 public void testSimple() throws Exception {
 Random random = random();

@@ -70,24 +70,24 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 dir,
 newIndexWriterConfig(new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 Document doc = new Document();
-addField(doc, groupField, "1");
-addField(doc, countField, "1");
+addField(doc, GROUP_FIELD, "1");
+addField(doc, COUNT_FIELD, "1");
 doc.add(new TextField("content", "random text", Field.Store.NO));
 doc.add(new StringField("id", "1", Field.Store.NO));
 w.addDocument(doc);

 // 1
 doc = new Document();
-addField(doc, groupField, "1");
-addField(doc, countField, "1");
+addField(doc, GROUP_FIELD, "1");
+addField(doc, COUNT_FIELD, "1");
 doc.add(new TextField("content", "some more random text blob", Field.Store.NO));
 doc.add(new StringField("id", "2", Field.Store.NO));
 w.addDocument(doc);

 // 2
 doc = new Document();
-addField(doc, groupField, "1");
-addField(doc, countField, "2");
+addField(doc, GROUP_FIELD, "1");
+addField(doc, COUNT_FIELD, "2");
 doc.add(new TextField("content", "some more random textual data", Field.Store.NO));
 doc.add(new StringField("id", "3", Field.Store.NO));
 w.addDocument(doc);

@@ -95,23 +95,23 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {

 // 3 -- no count field
 doc = new Document();
-addField(doc, groupField, "2");
+addField(doc, GROUP_FIELD, "2");
 doc.add(new TextField("content", "some random text", Field.Store.NO));
 doc.add(new StringField("id", "4", Field.Store.NO));
 w.addDocument(doc);

 // 4
 doc = new Document();
-addField(doc, groupField, "3");
-addField(doc, countField, "1");
+addField(doc, GROUP_FIELD, "3");
+addField(doc, COUNT_FIELD, "1");
 doc.add(new TextField("content", "some more random text", Field.Store.NO));
 doc.add(new StringField("id", "5", Field.Store.NO));
 w.addDocument(doc);

 // 5
 doc = new Document();
-addField(doc, groupField, "3");
-addField(doc, countField, "1");
+addField(doc, GROUP_FIELD, "3");
+addField(doc, COUNT_FIELD, "1");
 doc.add(new TextField("content", "random blob", Field.Store.NO));
 doc.add(new StringField("id", "6", Field.Store.NO));
 w.addDocument(doc);

@@ -119,7 +119,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 // 6 -- no author field
 doc = new Document();
 doc.add(new TextField("content", "random word stuck in alot of other text", Field.Store.YES));
-addField(doc, countField, "1");
+addField(doc, COUNT_FIELD, "1");
 doc.add(new StringField("id", "6", Field.Store.NO));
 w.addDocument(doc);

@@ -145,10 +145,10 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 };

 // === Search for content:random
-FirstPassGroupingCollector<Comparable<Object>> firstCollector = createRandomFirstPassCollector(new Sort(), groupField, 10);
+FirstPassGroupingCollector<Comparable<Object>> firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
 indexSearcher.search(new TermQuery(new Term("content", "random")), firstCollector);
 DistinctValuesCollector<Comparable<Object>> distinctValuesCollector
-= createDistinctCountCollector(firstCollector, groupField, countField);
+= createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
 indexSearcher.search(new TermQuery(new Term("content", "random")), distinctValuesCollector);

 List<DistinctValuesCollector.GroupCount<Comparable<Object>>> gcs = distinctValuesCollector.getGroups();

@@ -178,9 +178,9 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 compare("1", countValues.get(0));

 // === Search for content:some
-firstCollector = createRandomFirstPassCollector(new Sort(), groupField, 10);
+firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
 indexSearcher.search(new TermQuery(new Term("content", "some")), firstCollector);
-distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField);
+distinctValuesCollector = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
 indexSearcher.search(new TermQuery(new Term("content", "some")), distinctValuesCollector);

 gcs = distinctValuesCollector.getGroups();

@@ -205,9 +205,9 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 compare("1", countValues.get(0));

 // === Search for content:blob
-firstCollector = createRandomFirstPassCollector(new Sort(), groupField, 10);
+firstCollector = createRandomFirstPassCollector(new Sort(), GROUP_FIELD, 10);
 indexSearcher.search(new TermQuery(new Term("content", "blob")), firstCollector);
-distinctValuesCollector = createDistinctCountCollector(firstCollector, groupField, countField);
+distinctValuesCollector = createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
 indexSearcher.search(new TermQuery(new Term("content", "blob")), distinctValuesCollector);

 gcs = distinctValuesCollector.getGroups();

@@ -242,10 +242,10 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {

 List<DistinctValuesCollector.GroupCount<Comparable<?>>> expectedResult = createExpectedResult(context, term, groupSort, topN);

-FirstPassGroupingCollector<Comparable<?>> firstCollector = createRandomFirstPassCollector(groupSort, groupField, topN);
+FirstPassGroupingCollector<Comparable<?>> firstCollector = createRandomFirstPassCollector(groupSort, GROUP_FIELD, topN);
 searcher.search(new TermQuery(new Term("content", term)), firstCollector);
 DistinctValuesCollector<Comparable<?>> distinctValuesCollector
-= createDistinctCountCollector(firstCollector, groupField, countField);
+= createDistinctCountCollector(firstCollector, GROUP_FIELD, COUNT_FIELD);
 searcher.search(new TermQuery(new Term("content", term)), distinctValuesCollector);
 @SuppressWarnings("unchecked")
 List<DistinctValuesCollector.GroupCount<Comparable<?>>> actualResult = distinctValuesCollector.getGroups();

@@ -440,10 +440,10 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase {
 doc.add(new StringField("id", String.format(Locale.ROOT, "%09d", i), Field.Store.YES));
 doc.add(new SortedDocValuesField("id", new BytesRef(String.format(Locale.ROOT, "%09d", i))));
 if (groupValue != null) {
-addField(doc, groupField, groupValue);
+addField(doc, GROUP_FIELD, groupValue);
 }
 if (countValue != null) {
-addField(doc, countField, countValue);
+addField(doc, COUNT_FIELD, countValue);
 }
 doc.add(new TextField("content", content, Field.Store.YES));
 w.addDocument(doc);

@@ -39,7 +39,7 @@ public class LengthGoalBreakIteratorTest extends LuceneTestCase {
 // We do a '.' BreakIterator and test varying the length goal.
 // 0 1
 // 01234567890123456789
-final String content = "Aa bb. Cc dd. Ee ff";
+static final String CONTENT = "Aa bb. Cc dd. Ee ff";

 public void testTargetLen() throws IOException {
 // "goal" means target length goal to find closest break

@@ -47,22 +47,22 @@ public class LengthGoalBreakIteratorTest extends LuceneTestCase {
 // at first word:
 Query query = query("aa");
 assertEquals("almost two sent",
-"<b>Aa</b> bb.", highlightClosestToLen(content, query, 9));
+"<b>Aa</b> bb.", highlightClosestToLen(CONTENT, query, 9));
 assertEquals( "barely two sent",
-"<b>Aa</b> bb. Cc dd.", highlightClosestToLen(content, query, 10));
+"<b>Aa</b> bb. Cc dd.", highlightClosestToLen(CONTENT, query, 10));
 assertEquals("long goal",
-"<b>Aa</b> bb. Cc dd. Ee ff", highlightClosestToLen(content, query, 17 + random().nextInt(20)));
+"<b>Aa</b> bb. Cc dd. Ee ff", highlightClosestToLen(CONTENT, query, 17 + random().nextInt(20)));

 // at some word not at start of passage
 query = query("dd");
 assertEquals("short goal",
-" Cc <b>dd</b>.", highlightClosestToLen(content, query, random().nextInt(5)));
+" Cc <b>dd</b>.", highlightClosestToLen(CONTENT, query, random().nextInt(5)));
 assertEquals("almost two sent",
-" Cc <b>dd</b>.", highlightClosestToLen(content, query, 10));
+" Cc <b>dd</b>.", highlightClosestToLen(CONTENT, query, 10));
 assertEquals("barely two sent",
-" Cc <b>dd</b>. Ee ff", highlightClosestToLen(content, query, 11));
+" Cc <b>dd</b>. Ee ff", highlightClosestToLen(CONTENT, query, 11));
 assertEquals("long goal",
-" Cc <b>dd</b>. Ee ff", highlightClosestToLen(content, query, 12 + random().nextInt(20)));
+" Cc <b>dd</b>. Ee ff", highlightClosestToLen(CONTENT, query, 12 + random().nextInt(20)));
 }

 public void testMinLen() throws IOException {

@@ -70,19 +70,19 @@ public class LengthGoalBreakIteratorTest extends LuceneTestCase {

 Query query = query("dd");
 assertEquals("almost two sent",
-" Cc <b>dd</b>.", highlightMinLen(content, query, 6));
+" Cc <b>dd</b>.", highlightMinLen(CONTENT, query, 6));
 assertEquals("barely two sent",
-" Cc <b>dd</b>. Ee ff", highlightMinLen(content, query, 7));
+" Cc <b>dd</b>. Ee ff", highlightMinLen(CONTENT, query, 7));
 }

 public void testDefaultSummaryTargetLen() throws IOException {
 Query query = query("zz");
 assertEquals("Aa bb.",
-highlightClosestToLen(content, query, random().nextInt(10))); // < 10
+highlightClosestToLen(CONTENT, query, random().nextInt(10))); // < 10
 assertEquals("Aa bb. Cc dd.",
-highlightClosestToLen(content, query, 10 + 6)); // cusp of adding 3rd sentence
+highlightClosestToLen(CONTENT, query, 10 + 6)); // cusp of adding 3rd sentence
 assertEquals("Aa bb. Cc dd. Ee ff",
-highlightClosestToLen(content, query, 17 + random().nextInt(20))); // >= 14
+highlightClosestToLen(CONTENT, query, 17 + random().nextInt(20))); // >= 14
 }

 private Query query(String qStr) {

@@ -47,9 +47,9 @@ import org.apache.lucene.util.LuceneTestCase;

 public abstract class AbstractTestCase extends LuceneTestCase {

-protected final String F = "f";
-protected final String F1 = "f1";
-protected final String F2 = "f2";
+protected static final String F = "f";
+protected static final String F1 = "f1";
+protected static final String F2 = "f2";
 protected Directory dir;
 protected Analyzer analyzerW;
 protected Analyzer analyzerB;

@@ -42,18 +42,15 @@ import org.apache.lucene.queryparser.surround.query.SrndTruncQuery;
 */

 public class QueryParser implements QueryParserConstants {
-final int minimumPrefixLength = 3;
-final int minimumCharsInTrunc = 3;
-final String truncationErrorMessage = "Too unrestrictive truncation: ";
-final String boostErrorMessage = "Cannot handle boost value: ";
+static final int MINIMUM_PREFIX_LENGTH = 3;
+static final int MINIMUM_CHARS_IN_TRUNC = 3;
+static final String TRUNCATION_ERROR_MESSAGE = "Too unrestrictive truncation: ";
+static final String BOOST_ERROR_MESSAGE = "Cannot handle boost value: ";

 /* CHECKME: These should be the same as for the tokenizer. How? */
-final char truncator = '*';
-final char anyChar = '?';
-final char quote = '"';
-final char fieldOperator = ':';
-final char comma = ','; /* prefix list separator */
-final char carat = '^'; /* weight operator */
+static final char TRUNCATOR = '*';
+static final char ANY_CHAR = '?';
+static final char FIELD_OPERATOR = ':';

 static public SrndQuery parse(String query) throws ParseException {
 QueryParser parser = new QueryParser();

@@ -78,7 +75,7 @@ public class QueryParser implements QueryParserConstants {
 /* FIXME: check acceptable subquery: at least one subquery should not be
 * a fields query.
 */
-return new FieldsQuery(q, fieldNames, fieldOperator);
+return new FieldsQuery(q, fieldNames, FIELD_OPERATOR);
 }

 protected SrndQuery getOrQuery(List<SrndQuery> queries, boolean infix, Token orToken) {

@@ -128,12 +125,12 @@ public class QueryParser implements QueryParserConstants {
 }

 protected boolean allowedSuffix(String suffixed) {
-return (suffixed.length() - 1) >= minimumPrefixLength;
+return (suffixed.length() - 1) >= MINIMUM_PREFIX_LENGTH;
 }

 protected SrndQuery getPrefixQuery(
 String prefix, boolean quoted) {
-return new SrndPrefixQuery(prefix, quoted, truncator);
+return new SrndPrefixQuery(prefix, quoted, TRUNCATOR);
 }

 protected boolean allowedTruncation(String truncated) {

@@ -141,15 +138,15 @@ public class QueryParser implements QueryParserConstants {
 int nrNormalChars = 0;
 for (int i = 0; i < truncated.length(); i++) {
 char c = truncated.charAt(i);
-if ((c != truncator) && (c != anyChar)) {
+if ((c != TRUNCATOR) && (c != ANY_CHAR)) {
 nrNormalChars++;
 }
 }
-return nrNormalChars >= minimumCharsInTrunc;
+return nrNormalChars >= MINIMUM_CHARS_IN_TRUNC;
 }

 protected SrndQuery getTruncQuery(String truncated) {
-return new SrndTruncQuery(truncated, truncator, anyChar);
+return new SrndTruncQuery(truncated, TRUNCATOR, ANY_CHAR);
 }

 final public SrndQuery TopSrndQuery() throws ParseException {

@@ -437,7 +434,7 @@ public class QueryParser implements QueryParserConstants {
 term = jj_consume_token(SUFFIXTERM);
 /* ending in * */
 if (! allowedSuffix(term.image)) {
-{if (true) throw new ParseException(truncationErrorMessage + term.image);}
+{if (true) throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);}
 }
 {if (true) return getPrefixQuery(term.image.substring(0, term.image.length()-1), false /* not quoted */);}
 break;

@@ -445,15 +442,15 @@ public class QueryParser implements QueryParserConstants {
 term = jj_consume_token(TRUNCTERM);
 /* with at least one * or ? */
 if (! allowedTruncation(term.image)) {
-{if (true) throw new ParseException(truncationErrorMessage + term.image);}
+{if (true) throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);}
 }
 {if (true) return getTruncQuery(term.image);}
 break;
 case TRUNCQUOTED:
 term = jj_consume_token(TRUNCQUOTED);
 /* eg. "9b-b,m"* */
-if ((term.image.length() - 3) < minimumPrefixLength) {
-{if (true) throw new ParseException(truncationErrorMessage + term.image);}
+if ((term.image.length() - 3) < MINIMUM_PREFIX_LENGTH) {
+{if (true) throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);}
 }
 {if (true) return getPrefixQuery(term.image.substring(1, term.image.length()-2), true /* quoted */);}
 break;

@@ -483,10 +480,10 @@ public class QueryParser implements QueryParserConstants {
 try {
 f = Float.parseFloat(weight.image);
 } catch (Exception floatExc) {
-{if (true) throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")");}
+{if (true) throw new ParseException(BOOST_ERROR_MESSAGE + weight.image + " (" + floatExc + ")");}
 }
 if (f <= 0.0) {
-{if (true) throw new ParseException(boostErrorMessage + weight.image);}
+{if (true) throw new ParseException(BOOST_ERROR_MESSAGE + weight.image);}
 }
 q.setWeight(f * q.getWeight()); /* left associative, fwiw */

@@ -73,19 +73,16 @@ import org.apache.lucene.queryparser.surround.query.SrndTruncQuery;
 */

 public class QueryParser {
-final int minimumPrefixLength = 3;
-final int minimumCharsInTrunc = 3;
-final String truncationErrorMessage = "Too unrestrictive truncation: ";
-final String boostErrorMessage = "Cannot handle boost value: ";

+static final int MINIMUM_PREFIX_LENGTH = 3;
+static final int MINIMUM_CHARS_IN_TRUNC = 3;
+static final String TRUNCATION_ERROR_MESSAGE = "Too unrestrictive truncation: ";
+static final String BOOST_ERROR_MESSAGE = "Cannot handle boost value: ";

 /* CHECKME: These should be the same as for the tokenizer. How? */
-final char truncator = '*';
-final char anyChar = '?';
-final char quote = '"';
-final char fieldOperator = ':';
-final char comma = ','; /* prefix list separator */
-final char carat = '^'; /* weight operator */

+static final char TRUNCATOR = '*';
+static final char ANY_CHAR = '?';
+static final char FIELD_OPERATOR = ':';

 static public SrndQuery parse(String query) throws ParseException {
 QueryParser parser = new QueryParser();
 return parser.parse2(query);

@@ -109,7 +106,7 @@ public class QueryParser {
 /* FIXME: check acceptable subquery: at least one subquery should not be
 * a fields query.
 */
-return new FieldsQuery(q, fieldNames, fieldOperator);
+return new FieldsQuery(q, fieldNames, FIELD_OPERATOR);
 }

 protected SrndQuery getOrQuery(List<SrndQuery> queries, boolean infix, Token orToken) {

@@ -159,12 +156,12 @@ public class QueryParser {
 }

 protected boolean allowedSuffix(String suffixed) {
-return (suffixed.length() - 1) >= minimumPrefixLength;
+return (suffixed.length() - 1) >= MINIMUM_PREFIX_LENGTH;
 }

 protected SrndQuery getPrefixQuery(
 String prefix, boolean quoted) {
-return new SrndPrefixQuery(prefix, quoted, truncator);
+return new SrndPrefixQuery(prefix, quoted, TRUNCATOR);
 }

 protected boolean allowedTruncation(String truncated) {

@@ -172,15 +169,15 @@ public class QueryParser {
 int nrNormalChars = 0;
 for (int i = 0; i < truncated.length(); i++) {
 char c = truncated.charAt(i);
-if ((c != truncator) && (c != anyChar)) {
+if ((c != TRUNCATOR) && (c != ANY_CHAR)) {
 nrNormalChars++;
 }
 }
-return nrNormalChars >= minimumCharsInTrunc;
+return nrNormalChars >= MINIMUM_CHARS_IN_TRUNC;
 }

 protected SrndQuery getTruncQuery(String truncated) {
-return new SrndTruncQuery(truncated, truncator, anyChar);
+return new SrndTruncQuery(truncated, TRUNCATOR, ANY_CHAR);
 }
 }

@@ -432,21 +429,21 @@ SrndQuery SimpleTerm() : {

 | term=<SUFFIXTERM> { /* ending in * */
 if (! allowedSuffix(term.image)) {
-throw new ParseException(truncationErrorMessage + term.image);
+throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);
 }
 return getPrefixQuery(term.image.substring(0, term.image.length()-1), false /* not quoted */);
 }

 | term=<TRUNCTERM> { /* with at least one * or ? */
 if (! allowedTruncation(term.image)) {
-throw new ParseException(truncationErrorMessage + term.image);
+throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);
 }
 return getTruncQuery(term.image);
 }

 | term=<TRUNCQUOTED> { /* eg. "9b-b,m"* */
-if ((term.image.length() - 3) < minimumPrefixLength) {
-throw new ParseException(truncationErrorMessage + term.image);
+if ((term.image.length() - 3) < MINIMUM_PREFIX_LENGTH) {
+throw new ParseException(TRUNCATION_ERROR_MESSAGE + term.image);
 }
 return getPrefixQuery(term.image.substring(1, term.image.length()-2), true /* quoted */);
 }

@@ -462,10 +459,10 @@ void OptionalWeights(SrndQuery q) : {
 try {
 f = Float.parseFloat(weight.image);
 } catch (Exception floatExc) {
-throw new ParseException(boostErrorMessage + weight.image + " (" + floatExc + ")");
+throw new ParseException(BOOST_ERROR_MESSAGE + weight.image + " (" + floatExc + ")");
 }
 if (f <= 0.0) {
-throw new ParseException(boostErrorMessage + weight.image);
+throw new ParseException(BOOST_ERROR_MESSAGE + weight.image);
 }
 q.setWeight(f * q.getWeight()); /* left associative, fwiw */
 }

@@ -28,7 +28,7 @@ public class FieldsQuery extends SrndQuery { /* mostly untested */
 private SrndQuery q;
 private List<String> fieldNames;
 private final char fieldOp;
-private final String OrOperatorName = "OR"; /* for expanded queries, not normally visible */
+private static final String OR_OPERATOR_NAME = "OR"; /* for expanded queries, not normally visible */

 public FieldsQuery(SrndQuery q, List<String> fieldNames, char fieldOp) {
 this.q = q;

@@ -61,7 +61,7 @@ public class FieldsQuery extends SrndQuery { /* mostly untested */
 }
 OrQuery oq = new OrQuery(queries,
 true /* infix OR for field names */,
-OrOperatorName);
+OR_OPERATOR_NAME);
 // System.out.println(getClass().toString() + ", fields expanded: " + oq.toString()); /* needs testing */
 return oq.makeLuceneQueryField(null, qf);
 }

@@ -27,7 +27,7 @@ import static org.junit.Assert.assertTrue;

 public class GeoBBoxTest {

-protected final double DEGREES_TO_RADIANS = Math.PI / 180.0;
+protected static final double DEGREES_TO_RADIANS = Math.PI / 180.0;

 @Test
 public void testBBoxDegenerate() {

@@ -60,7 +60,7 @@ import org.apache.lucene.util.TestUtil;

 public final class MockRandomPostingsFormat extends PostingsFormat {
 private final Random seedRandom;
-private final String SEED_EXT = "sd";
+private static final String SEED_EXT = "sd";

 public MockRandomPostingsFormat() {
 // This ctor should *only* be used at read-time: get NPE if you use it!

@@ -35,34 +35,34 @@ import org.xml.sax.SAXException;
 */
 public class AnalyticsContentHandler implements ContentHandler {
 // XML Element/Attribute Name Constants
-public final String ANALYTICS_REQUEST_ENVELOPE="analyticsRequestEnvelope";
+public static final String ANALYTICS_REQUEST_ENVELOPE="analyticsRequestEnvelope";

-public final String ANALYTICS_REQUEST="analyticsRequest";
-public final String NAME="name";
+public static final String ANALYTICS_REQUEST="analyticsRequest";
+public static final String NAME="name";

-public final String STATISTIC="statistic";
-public final String EXPRESSION="expression";
+public static final String STATISTIC="statistic";
+public static final String EXPRESSION="expression";

-public final String FIELD_FACET="fieldFacet";
-public final String FIELD="field";
-public final String SHOW_MISSING="showMissing";
-public final String LIMIT="limit";
-public final String MIN_COUNT="minCount";
+public static final String FIELD_FACET="fieldFacet";
+public static final String FIELD="field";
+public static final String SHOW_MISSING="showMissing";
+public static final String LIMIT="limit";
+public static final String MIN_COUNT="minCount";

-public final String SORT_SPECIFICATION="sortSpecification";
-public final String STAT_NAME="statName";
-public final String DIRECTION="direction";
+public static final String SORT_SPECIFICATION="sortSpecification";
+public static final String STAT_NAME="statName";
+public static final String DIRECTION="direction";

-public final String RANGE_FACET="rangeFacet";
-public final String START="start";
-public final String END="end";
-public final String GAP="gap";
-public final String INCLUDE_BOUNDARY="includeBoundary";
-public final String OTHER_RANGE="otherRange";
-public final String HARD_END="hardend";
+public static final String RANGE_FACET="rangeFacet";
+public static final String START="start";
+public static final String END="end";
+public static final String GAP="gap";
+public static final String INCLUDE_BOUNDARY="includeBoundary";
+public static final String OTHER_RANGE="otherRange";
+public static final String HARD_END="hardend";

-public final String QUERY_FACET="queryFacet";
-public final String QUERY="query";
+public static final String QUERY_FACET="queryFacet";
+public static final String QUERY="query";

 // Default Values
 public static final int DEFAULT_FACET_LIMIT = -1;

@@ -66,7 +66,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "1", "desC", "one"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigWithCaseInsensitiveFields);
+runFullImport(DATA_CONFIG_WITH_CASE_INSENSITIVE_FIELDS);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertTrue("Start event listener was not called", StartEventListener.executed);

@@ -81,7 +81,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "1", "FORCE_ERROR", "true"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigWithErrorHandler);
+runFullImport(DATA_CONFIG_WITH_ERROR_HANDLER);

 assertTrue("Error event listener was not called", ErrorEventListener.executed);
 assertTrue(ErrorEventListener.lastException.getMessage().contains("ForcedException"));

@@ -94,7 +94,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "1", "desc", "one"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigWithDynamicTransformer);
+runFullImport(DATA_CONFIG_WITH_DYNAMIC_TRANSFORMER);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertQ(req("dynamic_s:test"), "//*[@numFound='1']");

@@ -110,7 +110,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
 "debug", "on", "clean", "true", "commit", "true",
 "category", "search",
-"dataConfig", requestParamAsVariable);
+"dataConfig", REQUEST_PARAM_AS_VARIABLE);
 h.query("/dataimport", request);
 assertQ(req("desc:ApacheSolr"), "//*[@numFound='1']");
 }

@@ -124,7 +124,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {

 LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
 "debug", "on", "clean", "true", "commit", "true",
-"dataConfig", dataConfigWithDynamicFieldNames);
+"dataConfig", DATA_CONFIG_WITH_DYNAMIC_FIELD_NAMES);
 h.query("/dataimport", request);
 assertQ(req("id:101"), "//*[@numFound='1']", "//*[@name='101_s']");
 }

@@ -139,7 +139,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
 "debug", "on", "clean", "true", "commit", "true",
 "mypk", "id", "text", "desc",
-"dataConfig", dataConfigWithTemplatizedFieldNames);
+"dataConfig", DATA_CONFIG_WITH_TEMPLATIZED_FIELD_NAMES);
 h.query("/dataimport", request);
 assertQ(req("id:101"), "//*[@numFound='1']");
 }

@@ -162,7 +162,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "2", "desc", "two", DocBuilder.SKIP_DOC, "true"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigWithDynamicTransformer);
+runFullImport(DATA_CONFIG_WITH_DYNAMIC_TRANSFORMER);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertQ(req("id:2"), "//*[@numFound='0']");

@@ -176,7 +176,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "2", "desc", "two", DocBuilder.SKIP_ROW, "true"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigWithDynamicTransformer);
+runFullImport(DATA_CONFIG_WITH_DYNAMIC_TRANSFORMER);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertQ(req("id:2"), "//*[@numFound='0']");

@@ -196,7 +196,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("name_s", "xyz", DocBuilder.SKIP_ROW, "true"));
 MockDataSource.setIterator("4", rows.iterator());

-runFullImport(dataConfigWithTwoEntities);
+runFullImport(DATA_CONFIG_WITH_TWO_ENTITIES);
 assertQ(req("id:3"), "//*[@numFound='1']");
 assertQ(req("id:4"), "//*[@numFound='1']");
 assertQ(req("name_s:abcd"), "//*[@numFound='1']");

@@ -211,7 +211,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "2", "desc", "two", "$stopTransform", "true"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigForSkipTransform);
+runFullImport(DATA_CONFIG_FOR_SKIP_TRANSFORM);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertQ(req("id:2"), "//*[@numFound='1']");

@@ -227,7 +227,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "3", "desc", "two", DocBuilder.DELETE_DOC_BY_ID, "2"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigForSkipTransform);
+runFullImport(DATA_CONFIG_FOR_SKIP_TRANSFORM);

 assertQ(req("id:1"), "//*[@numFound='1']");
 assertQ(req("id:2"), "//*[@numFound='0']");

@@ -243,7 +243,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows.add(createMap("id", "3", "desc", "two", DocBuilder.DELETE_DOC_BY_QUERY, "desc:one"));
 MockDataSource.setIterator("select * from x", rows.iterator());

-runFullImport(dataConfigForSkipTransform);
+runFullImport(DATA_CONFIG_FOR_SKIP_TRANSFORM);

 assertQ(req("id:1"), "//*[@numFound='0']");
 assertQ(req("id:2"), "//*[@numFound='0']");

@@ -256,7 +256,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 rows = new ArrayList();
 rows.add(createMap(DocBuilder.DELETE_DOC_BY_ID, "3"));
 MockDataSource.setIterator("select * from x", rows.iterator());
-runFullImport(dataConfigForSkipTransform, createMap("clean","false"));
+runFullImport(DATA_CONFIG_FOR_SKIP_TRANSFORM, createMap("clean","false"));
 assertQ(req("id:3"), "//*[@numFound='0']");

 assertTrue("Update request processor processDelete was not called", TestUpdateRequestProcessor.processDeleteCalled);

@@ -274,12 +274,12 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
 createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
 createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
-runFullImport(dataConfigFileList, params);
+runFullImport(DATA_CONFIG_FILE_LIST, params);
 assertQ(req("*:*"), "//*[@numFound='3']");

 // Add a new file after a full index is done
 createFile(tmpdir, "t.xml", "t.xml".getBytes(StandardCharsets.UTF_8), false);
-runFullImport(dataConfigFileList, params);
+runFullImport(DATA_CONFIG_FILE_LIST, params);
 // we should find only 1 because by default clean=true is passed
 // and this particular import should find only one file t.xml
 assertQ(req("*:*"), "//*[@numFound='1']");

@@ -342,7 +342,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 }
 }

-private final String requestParamAsVariable = "<dataConfig>\n" +
+private static final String REQUEST_PARAM_AS_VARIABLE = "<dataConfig>\n" +
 " <dataSource type=\"MockDataSource\" />\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from books where category='${dataimporter.request.category}'\">\n" +

@@ -352,7 +352,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithDynamicTransformer = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_DYNAMIC_TRANSFORMER = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from x\"" +
 " transformer=\"TestDocBuilder2$AddDynamicFieldTransformer\">\n" +

@@ -362,7 +362,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigForSkipTransform = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_FOR_SKIP_TRANSFORM = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from x\"" +
 " transformer=\"TemplateTransformer\">\n" +

@@ -373,7 +373,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithTwoEntities = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_TWO_ENTITIES = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from x\">" +
 " <field column=\"id\" />\n" +

@@ -385,7 +385,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithCaseInsensitiveFields = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_CASE_INSENSITIVE_FIELDS = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
 " <document onImportStart=\"TestDocBuilder2$StartEventListener\" onImportEnd=\"TestDocBuilder2$EndEventListener\">\n" +
 " <entity name=\"books\" query=\"select * from x\">\n" +
 " <field column=\"ID\" />\n" +

@@ -394,7 +394,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithErrorHandler = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_ERROR_HANDLER = "<dataConfig> <dataSource type=\"MockDataSource\"/>\n" +
 " <document onError=\"TestDocBuilder2$ErrorEventListener\">\n" +
 " <entity name=\"books\" query=\"select * from x\" transformer=\"TestDocBuilder2$ForcedExceptionTransformer\">\n" +
 " <field column=\"id\" />\n" +

@@ -403,7 +403,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithTemplatizedFieldNames = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_TEMPLATIZED_FIELD_NAMES = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from x\">\n" +
 " <field column=\"mypk\" name=\"${dih.request.mypk}\" />\n" +

@@ -412,7 +412,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigWithDynamicFieldNames = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
+private static final String DATA_CONFIG_WITH_DYNAMIC_FIELD_NAMES = "<dataConfig><dataSource type=\"MockDataSource\"/>\n" +
 " <document>\n" +
 " <entity name=\"books\" query=\"select * from x\">\n" +
 " <field column=\"mypk\" name=\"id\" />\n" +

@@ -421,7 +421,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
 " </document>\n" +
 "</dataConfig>";

-private final String dataConfigFileList = "<dataConfig>\n" +
+private static final String DATA_CONFIG_FILE_LIST = "<dataConfig>\n" +
 "\t<document>\n" +
 "\t\t<entity name=\"x\" processor=\"FileListEntityProcessor\" \n" +
 "\t\t\t\tfileName=\".*\" newerThan=\"${dih.last_index_time}\" \n" +

@@ -155,7 +155,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas

 int totalDocsNum = parentsNum + childrenNum + grandChildrenNum;

-runFullImport(threeLevelHierarchyConfig);
+runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);

 assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
 assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);

@@ -333,9 +333,9 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 String children = createChildren(parentType, 0, depth, parentData, holder);

 String rootFields = createFieldsList(FIELD_ID, "desc", "type_s");
-String rootEntity = StrUtils.formatString(rootEntityTemplate, parentType, "SELECT * FROM " + parentType, rootFields, children);
+String rootEntity = StrUtils.formatString(ROOT_ENTITY_TEMPLATE, parentType, "SELECT * FROM " + parentType, rootFields, children);

-String config = StrUtils.formatString(dataConfigTemplate, rootEntity);
+String config = StrUtils.formatString(DATA_CONFIG_TEMPLATE, rootEntity);
 return config;
 }

@@ -396,7 +396,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 List<Hierarchy> childData = createMockedIterator(childName, parentData, holder);

 String subChildren = createChildren(childName, currentLevel + 1, maxLevel, childData, holder);
-String child = StrUtils.formatString(childEntityTemplate, childName, select, fields, subChildren);
+String child = StrUtils.formatString(CHILD_ENTITY_TEMPLATE, childName, select, fields, subChildren);
 builder.append(child);
 builder.append('\n');
 }

@@ -414,7 +414,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 return builder.toString();
 }

-private final String threeLevelHierarchyConfig = "<dataConfig>\n" +
+private static final String THREE_LEVEL_HIERARCHY_CONFIG = "<dataConfig>\n" +
 " <dataSource type='MockDataSource' />\n" +
 " <document>\n" +
 " <entity name='PARENT' query='select * from PARENT'>\n" +

@@ -436,7 +436,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 "</dataConfig>";

 /** {0} is rootEntity block **/
-private final String dataConfigTemplate = "<dataConfig><dataSource type=\"MockDataSource\" />\n<document>\n {0}</document></dataConfig>";
+private static final String DATA_CONFIG_TEMPLATE = "<dataConfig><dataSource type=\"MockDataSource\" />\n<document>\n {0}</document></dataConfig>";

 /**
 * {0} - entityName,

@@ -444,7 +444,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 * {2} - fieldsList
 * {3} - childEntitiesList
 **/
-private final String rootEntityTemplate = "<entity name=\"{0}\" query=\"{1}\">\n{2} {3}\n</entity>\n";
+private static final String ROOT_ENTITY_TEMPLATE = "<entity name=\"{0}\" query=\"{1}\">\n{2} {3}\n</entity>\n";

 /**
 * {0} - entityName,

@@ -452,7 +452,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
 * {2} - fieldsList
 * {3} - childEntitiesList
 **/
-private final String childEntityTemplate = "<entity " + ConfigNameConstants.CHILD + "=\"true\" name=\"{0}\" query=\"{1}\">\n {2} {3} </entity>\n";
+private static final String CHILD_ENTITY_TEMPLATE = "<entity " + ConfigNameConstants.CHILD + "=\"true\" name=\"{0}\" query=\"{1}\">\n {2} {3} </entity>\n";

 private BitSetProducer createParentFilter(String type) {
 BooleanQuery.Builder parentQuery = new BooleanQuery.Builder();

@@ -36,7 +36,7 @@ public class DistributedMap {

 protected SolrZkClient zookeeper;

-protected final String prefix = "mn-";
+protected static final String PREFIX = "mn-";

 public DistributedMap(SolrZkClient zookeeper, String dir) {
 this.dir = dir;

@@ -56,15 +56,15 @@ public class DistributedMap {


 public void put(String trackingId, byte[] data) throws KeeperException, InterruptedException {
-zookeeper.makePath(dir + "/" + prefix + trackingId, data, CreateMode.PERSISTENT, null, false, true);
+zookeeper.makePath(dir + "/" + PREFIX + trackingId, data, CreateMode.PERSISTENT, null, false, true);
 }

 public byte[] get(String trackingId) throws KeeperException, InterruptedException {
-return zookeeper.getData(dir + "/" + prefix + trackingId, null, null, true);
+return zookeeper.getData(dir + "/" + PREFIX + trackingId, null, null, true);
 }

 public boolean contains(String trackingId) throws KeeperException, InterruptedException {
-return zookeeper.exists(dir + "/" + prefix + trackingId, true);
+return zookeeper.exists(dir + "/" + PREFIX + trackingId, true);
 }

 public int size() throws KeeperException, InterruptedException {

@@ -80,7 +80,7 @@ public class DistributedMap {
 */
 public boolean remove(String trackingId) throws KeeperException, InterruptedException {
 try {
-zookeeper.delete(dir + "/" + prefix + trackingId, -1, true);
+zookeeper.delete(dir + "/" + PREFIX + trackingId, -1, true);
 } catch (KeeperException.NoNodeException e) {
 return false;
 }

@@ -42,7 +42,7 @@ import org.slf4j.LoggerFactory;
 public class OverseerTaskQueue extends DistributedQueue {
 private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

-private final String response_prefix = "qnr-" ;
+private static final String RESPONSE_PREFIX = "qnr-" ;

 public OverseerTaskQueue(SolrZkClient zookeeper, String dir) {
 this(zookeeper, dir, new Overseer.Stats());

@@ -88,7 +88,7 @@ public class OverseerTaskQueue extends DistributedQueue {
 Timer.Context time = stats.time(dir + "_remove_event");
 try {
 String path = event.getId();
-String responsePath = dir + "/" + response_prefix
+String responsePath = dir + "/" + RESPONSE_PREFIX
 + path.substring(path.lastIndexOf("-") + 1);
 if (zookeeper.exists(responsePath, true)) {
 zookeeper.setData(responsePath, event.getBytes(), true);

@@ -217,7 +217,7 @@ public class OverseerTaskQueue extends DistributedQueue {

 String createResponseNode() throws KeeperException, InterruptedException {
 return createData(
-dir + "/" + response_prefix,
+dir + "/" + RESPONSE_PREFIX,
 null, CreateMode.EPHEMERAL_SEQUENTIAL);
 }

@ -504,7 +504,7 @@ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoa
final FunctionValues currencies = currencyValues.getValues(context, reader);

return new FunctionValues() {
private final int MAX_CURRENCIES_TO_CACHE = 256;
private static final int MAX_CURRENCIES_TO_CACHE = 256;
private final int[] fractionDigitCache = new int[MAX_CURRENCIES_TO_CACHE];
private final String[] currencyOrdToCurrencyCache = new String[MAX_CURRENCIES_TO_CACHE];
private final double[] exchangeRateCache = new double[MAX_CURRENCIES_TO_CACHE];
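A side note on the hunk above: making MAX_CURRENCIES_TO_CACHE static compiles even though it lives inside an anonymous inner class, because a static final primitive initialized with a compile-time constant is a "constant variable", the one kind of static member the language permits in inner and anonymous classes (other static members only became legal in Java 16). A tiny illustrative sketch, unrelated to the actual CurrencyField code:

public class AnonymousConstantSketch {
  Runnable cachedPrinter() {
    return new Runnable() {
      // Allowed: a constant variable may be declared static inside an anonymous class.
      private static final int CACHE_SIZE = 256;
      private final int[] cache = new int[CACHE_SIZE];

      @Override
      public void run() {
        System.out.println("cache slots: " + cache.length);
      }
    };
  }

  public static void main(String[] args) {
    new AnonymousConstantSketch().cachedPrinter().run();
  }
}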
@ -48,7 +48,7 @@ public final class CommitTracker implements Runnable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

// scheduler delay for maxDoc-triggered autocommits
public final int DOC_COMMIT_DELAY_MS = 1;
public static final int DOC_COMMIT_DELAY_MS = 1;

// settings, not final so we can change them in testing
private int docsUpperBound;

@ -66,7 +66,7 @@ public final class CommitTracker implements Runnable {

private final boolean softCommit;
private boolean openSearcher;
private final boolean waitSearcher = true;
private static final boolean WAIT_SEARCHER = true;

private String name;

@ -205,7 +205,7 @@ public final class CommitTracker implements Runnable {
try {
CommitUpdateCommand command = new CommitUpdateCommand(req, false);
command.openSearcher = openSearcher;
command.waitSearcher = waitSearcher;
command.waitSearcher = WAIT_SEARCHER;
command.softCommit = softCommit;
if (core.getCoreDescriptor().getCloudDescriptor() != null
&& core.getCoreDescriptor().getCloudDescriptor().isLeader()
@ -1105,7 +1105,7 @@ public class SimplePostTool {
//
class PageFetcher {
Map<String, List<String>> robotsCache;
final String DISALLOW = "Disallow:";
static final String DISALLOW = "Disallow:";

public PageFetcher() {
robotsCache = new HashMap<>();
@ -81,8 +81,8 @@ public class OverseerTest extends SolrTestCaseJ4 {
private List<ZkStateReader> readers = new ArrayList<>();
private List<HttpShardHandlerFactory> httpShardHandlerFactorys = new ArrayList<>();
private List<UpdateShardHandler> updateShardHandlers = new ArrayList<>();

final private String collection = SolrTestCaseJ4.DEFAULT_TEST_COLLECTION_NAME;
private static final String COLLECTION = SolrTestCaseJ4.DEFAULT_TEST_COLLECTION_NAME;

public static class MockZKController{

@ -271,17 +271,17 @@ public class OverseerTest extends SolrTestCaseJ4 {
final int numShards=6;

for (int i = 0; i < numShards; i++) {
assertNotNull("shard got no id?", zkController.publishState(collection, "core" + (i+1), "node" + (i+1), Replica.State.ACTIVE, 3));
assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i+1), "node" + (i+1), Replica.State.ACTIVE, 3));
}
final Map<String,Replica> rmap = reader.getClusterState().getSlice(collection, "shard1").getReplicasMap();
final Map<String,Replica> rmap = reader.getClusterState().getSlice(COLLECTION, "shard1").getReplicasMap();
assertEquals(rmap.toString(), 2, rmap.size());
assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(collection, "shard2").getReplicasMap().size());
assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(collection, "shard3").getReplicasMap().size());
assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(COLLECTION, "shard2").getReplicasMap().size());
assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(COLLECTION, "shard3").getReplicasMap().size());

//make sure leaders are in cloud state
assertNotNull(reader.getLeaderUrl(collection, "shard1", 15000));
assertNotNull(reader.getLeaderUrl(collection, "shard2", 15000));
assertNotNull(reader.getLeaderUrl(collection, "shard3", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));

} finally {
close(zkClient);

@ -321,17 +321,17 @@ public class OverseerTest extends SolrTestCaseJ4 {
final int numShards=3;

for (int i = 0; i < numShards; i++) {
assertNotNull("shard got no id?", zkController.publishState(collection, "core" + (i+1), "node" + (i+1), Replica.State.ACTIVE, 3));
assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i+1), "node" + (i+1), Replica.State.ACTIVE, 3));
}

assertEquals(1, reader.getClusterState().getSlice(collection, "shard1").getReplicasMap().size());
assertEquals(1, reader.getClusterState().getSlice(collection, "shard2").getReplicasMap().size());
assertEquals(1, reader.getClusterState().getSlice(collection, "shard3").getReplicasMap().size());
assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard1").getReplicasMap().size());
assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard2").getReplicasMap().size());
assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard3").getReplicasMap().size());

//make sure leaders are in cloud state
assertNotNull(reader.getLeaderUrl(collection, "shard1", 15000));
assertNotNull(reader.getLeaderUrl(collection, "shard2", 15000));
assertNotNull(reader.getLeaderUrl(collection, "shard3", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));

// publish a bad queue item
String emptyCollectionName = "";

@ -408,7 +408,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
final String coreName = "core" + slot;

try {
ids[slot] = controllers[slot % nodeCount].publishState(collection, coreName, "node" + slot, Replica.State.ACTIVE, sliceCount);
ids[slot] = controllers[slot % nodeCount].publishState(COLLECTION, coreName, "node" + slot, Replica.State.ACTIVE, sliceCount);
} catch (Throwable e) {
e.printStackTrace();
fail("register threw exception:" + e.getClass());

@ -429,7 +429,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
for (int i = 0; i < 40; i++) {
cloudStateSliceCount = 0;
ClusterState state = reader.getClusterState();
final Map<String,Slice> slices = state.getSlicesMap(collection);
final Map<String,Slice> slices = state.getSlicesMap(COLLECTION);
if (slices != null) {
for (String name : slices.keySet()) {
cloudStateSliceCount += slices.get(name).getReplicasMap().size();

@ -483,7 +483,7 @@ public class OverseerTest extends SolrTestCaseJ4 {

//make sure leaders are in cloud state
for (int i = 0; i < sliceCount; i++) {
assertNotNull(reader.getLeaderUrl(collection, "shard" + (i + 1), 15000));
assertNotNull(reader.getLeaderUrl(COLLECTION, "shard" + (i + 1), 15000));
}

} finally {

@ -549,23 +549,23 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node1",
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core1",
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());

q.offer(Utils.toJSON(m));

waitForCollections(reader, collection);
waitForCollections(reader, COLLECTION);

assertSame(reader.getClusterState().toString(), Replica.State.RECOVERING,
reader.getClusterState().getSlice(collection, "shard1").getReplica("core_node1").getState());
reader.getClusterState().getSlice(COLLECTION, "shard1").getReplica("core_node1").getState());

//publish node state (active)
m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node1",
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core1",
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());

@ -634,25 +634,25 @@ public class OverseerTest extends SolrTestCaseJ4 {
overseerClient = electNewOverseer(server.getZkAddress());

Thread.sleep(1000);
mockController.publishState(collection, core, core_node,
mockController.publishState(COLLECTION, core, core_node,
Replica.State.RECOVERING, numShards);

waitForCollections(reader, collection);
verifyReplicaStatus(reader, collection, "shard1", "core_node1", Replica.State.RECOVERING);
waitForCollections(reader, COLLECTION);
verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.RECOVERING);

int version = getClusterStateVersion(zkClient);

mockController.publishState(collection, core, core_node, Replica.State.ACTIVE,
mockController.publishState(COLLECTION, core, core_node, Replica.State.ACTIVE,
numShards);

while (version == getClusterStateVersion(zkClient));

verifyReplicaStatus(reader, collection, "shard1", "core_node1", Replica.State.ACTIVE);
verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.ACTIVE);
version = getClusterStateVersion(zkClient);
overseerClient.close();
Thread.sleep(1000); // wait for overseer to get killed

mockController.publishState(collection, core, core_node,
mockController.publishState(COLLECTION, core, core_node,
Replica.State.RECOVERING, numShards);
version = getClusterStateVersion(zkClient);

@ -660,20 +660,20 @@ public class OverseerTest extends SolrTestCaseJ4 {

while (version == getClusterStateVersion(zkClient));

verifyReplicaStatus(reader, collection, "shard1", "core_node1", Replica.State.RECOVERING);
verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.RECOVERING);

assertEquals("Live nodes count does not match", 1, reader
.getClusterState().getLiveNodes().size());
assertEquals(shard+" replica count does not match", 1, reader.getClusterState()
.getSlice(collection, shard).getReplicasMap().size());
.getSlice(COLLECTION, shard).getReplicasMap().size());
version = getClusterStateVersion(zkClient);
mockController.publishState(collection, core, core_node, null, numShards);
mockController.publishState(COLLECTION, core, core_node, null, numShards);
while (version == getClusterStateVersion(zkClient));
Thread.sleep(500);
assertTrue(collection+" should remain after removal of the last core", // as of SOLR-5209 core removal does not cascade to remove the slice and collection
reader.getClusterState().hasCollection(collection));
assertTrue(COLLECTION +" should remain after removal of the last core", // as of SOLR-5209 core removal does not cascade to remove the slice and collection
reader.getClusterState().hasCollection(COLLECTION));
assertTrue(core_node+" should be gone after publishing the null state",
null == reader.getClusterState().getCollection(collection).getReplica(core_node));
null == reader.getClusterState().getCollection(COLLECTION).getReplica(core_node));
} finally {
close(mockController);
close(overseerClient);

@ -723,7 +723,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
overseerElector.setup(ec);
overseerElector.joinElection(ec, false);

mockController.publishState(collection, "core1", "core_node1", Replica.State.ACTIVE, 1);
mockController.publishState(COLLECTION, "core1", "core_node1", Replica.State.ACTIVE, 1);

assertNotNull(overseer.getStats());
assertTrue((overseer.getStats().getSuccessCount(OverseerAction.STATE.toLower())) > 0);

@ -819,19 +819,19 @@ public class OverseerTest extends SolrTestCaseJ4 {
for (int i = 0; i < atLeast(4); i++) {
killCounter.incrementAndGet(); //for each round allow 1 kill
mockController = new MockZKController(server.getZkAddress(), "node1");
mockController.publishState(collection, "core1", "node1", Replica.State.ACTIVE,1);
mockController.publishState(COLLECTION, "core1", "node1", Replica.State.ACTIVE,1);
if(mockController2!=null) {
mockController2.close();
mockController2 = null;
}
mockController.publishState(collection, "core1", "node1",Replica.State.RECOVERING,1);
mockController.publishState(COLLECTION, "core1", "node1",Replica.State.RECOVERING,1);
mockController2 = new MockZKController(server.getZkAddress(), "node2");
mockController.publishState(collection, "core1", "node1", Replica.State.ACTIVE,1);
verifyShardLeader(reader, collection, "shard1", "core1");
mockController2.publishState(collection, "core4", "node2", Replica.State.ACTIVE ,1);
mockController.publishState(COLLECTION, "core1", "node1", Replica.State.ACTIVE,1);
verifyShardLeader(reader, COLLECTION, "shard1", "core1");
mockController2.publishState(COLLECTION, "core4", "node2", Replica.State.ACTIVE ,1);
mockController.close();
mockController = null;
verifyShardLeader(reader, collection, "shard1", "core4");
verifyShardLeader(reader, COLLECTION, "shard1", "core4");
}
} finally {
if (killer != null) {

@ -874,18 +874,18 @@ public class OverseerTest extends SolrTestCaseJ4 {

overseerClient = electNewOverseer(server.getZkAddress());

mockController.publishState(collection, "core1", "core_node1", Replica.State.RECOVERING, 1);
mockController.publishState(COLLECTION, "core1", "core_node1", Replica.State.RECOVERING, 1);

waitForCollections(reader, "collection1");

verifyReplicaStatus(reader, collection, "shard1", "core_node1", Replica.State.RECOVERING);
verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.RECOVERING);

mockController.close();

int version = getClusterStateVersion(controllerClient);

mockController = new MockZKController(server.getZkAddress(), "node1");
mockController.publishState(collection, "core1", "core_node1", Replica.State.RECOVERING, 1);
mockController.publishState(COLLECTION, "core1", "core_node1", Replica.State.RECOVERING, 1);

while (version == reader.getClusterState().getZkClusterStateVersion()) {
Thread.sleep(100);

@ -940,11 +940,11 @@ public class OverseerTest extends SolrTestCaseJ4 {

overseerClient = electNewOverseer(server.getZkAddress());

mockController.publishState(collection, "core1", "node1", Replica.State.RECOVERING, 12);
mockController.publishState(COLLECTION, "core1", "node1", Replica.State.RECOVERING, 12);

waitForCollections(reader, collection);
waitForCollections(reader, COLLECTION);

assertEquals("Slicecount does not match", 12, reader.getClusterState().getSlices(collection).size());
assertEquals("Slicecount does not match", 12, reader.getClusterState().getSlices(COLLECTION).size());

} finally {
close(overseerClient);

@ -1117,7 +1117,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node1",
ZkStateReader.SHARD_ID_PROP, "s1",
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core1",
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());

@ -1126,7 +1126,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node1",
ZkStateReader.SHARD_ID_PROP, "s1",
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core2",
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());

@ -1140,19 +1140,19 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node1",
ZkStateReader.SHARD_ID_PROP, "s1",
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core3",
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());
queue.offer(Utils.toJSON(m));

for(int i=0;i<100;i++) {
Slice s = reader.getClusterState().getSlice(collection, "s1");
Slice s = reader.getClusterState().getSlice(COLLECTION, "s1");
if(s!=null && s.getReplicasMap().size()==3) break;
Thread.sleep(100);
}
assertNotNull(reader.getClusterState().getSlice(collection, "s1"));
assertEquals(3, reader.getClusterState().getSlice(collection, "s1").getReplicasMap().size());
assertNotNull(reader.getClusterState().getSlice(COLLECTION, "s1"));
assertEquals(3, reader.getClusterState().getSlice(COLLECTION, "s1").getReplicasMap().size());
} finally {
close(overseerClient);
close(zkClient);

@ -1340,14 +1340,14 @@ public class OverseerTest extends SolrTestCaseJ4 {
{
final Integer maxShardsPerNode = numReplicas * numShards;
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
"name", collection,
"name", COLLECTION,
ZkStateReader.NUM_SHARDS_PROP, numShards.toString(),
ZkStateReader.REPLICATION_FACTOR, "1",
ZkStateReader.MAX_SHARDS_PER_NODE, maxShardsPerNode.toString()
);
q.offer(Utils.toJSON(m));
}
waitForCollections(zkStateReader, collection);
waitForCollections(zkStateReader, COLLECTION);

// create nodes with state recovering
for (int rr = 1; rr <= numReplicas; ++rr) {

@ -1357,7 +1357,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.SHARD_ID_PROP, "shard"+ss,
ZkStateReader.NODE_NAME_PROP, "node"+N,
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core"+N,
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());

@ -1369,7 +1369,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
for (int rr = 1; rr <= numReplicas; ++rr) {
for (int ss = 1; ss <= numShards; ++ss) {
final int N = (numReplicas-rr)*numShards + ss;
verifyReplicaStatus(zkStateReader, collection, "shard"+ss, "core_node"+N, Replica.State.RECOVERING);
verifyReplicaStatus(zkStateReader, COLLECTION, "shard"+ss, "core_node"+N, Replica.State.RECOVERING);
}
}

@ -1380,7 +1380,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr",
ZkStateReader.NODE_NAME_PROP, "node"+N,
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NAME_PROP, "core"+N,
ZkStateReader.ROLES_PROP, "",
ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());

@ -1392,7 +1392,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
for (int rr = 1; rr <= numReplicas; ++rr) {
for (int ss = 1; ss <= numShards; ++ss) {
final int N = (numReplicas-rr)*numShards + ss;
verifyReplicaStatus(zkStateReader, collection, "shard"+ss, "core_node"+N, Replica.State.ACTIVE);
verifyReplicaStatus(zkStateReader, COLLECTION, "shard"+ss, "core_node"+N, Replica.State.ACTIVE);
}
}

@ -1401,7 +1401,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
for (int ss = 1; ss <= numShards; ++ss) {
final int N = (numReplicas-rr)*numShards + ss;
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(),
ZkStateReader.COLLECTION_PROP, collection,
ZkStateReader.COLLECTION_PROP, COLLECTION,
ZkStateReader.CORE_NODE_NAME_PROP, "core_node"+N);

q.offer(Utils.toJSON(m));

@ -1409,23 +1409,23 @@ public class OverseerTest extends SolrTestCaseJ4 {
{
int iterationsLeft = 100;
while (iterationsLeft-- > 0) {
final Slice slice = zkStateReader.getClusterState().getSlice(collection, "shard"+ss);
final Slice slice = zkStateReader.getClusterState().getSlice(COLLECTION, "shard"+ss);
if (null == slice || null == slice.getReplicasMap().get("core_node"+N)) {
break;
}
if (VERBOSE) log.info("still seeing {} shard{} core_node{}, rechecking in 50ms ({} iterations left)", collection, ss, N, iterationsLeft);
if (VERBOSE) log.info("still seeing {} shard{} core_node{}, rechecking in 50ms ({} iterations left)", COLLECTION, ss, N, iterationsLeft);
Thread.sleep(50);
}
}

final DocCollection docCollection = zkStateReader.getClusterState().getCollection(collection);
assertTrue("found no "+collection, (null != docCollection));
final DocCollection docCollection = zkStateReader.getClusterState().getCollection(COLLECTION);
assertTrue("found no "+ COLLECTION, (null != docCollection));

final Slice slice = docCollection.getSlice("shard"+ss);
assertTrue("found no "+collection+" shard"+ss+" slice after removal of replica "+rr+" of "+numReplicas, (null != slice));
assertTrue("found no "+ COLLECTION +" shard"+ss+" slice after removal of replica "+rr+" of "+numReplicas, (null != slice));

final Collection<Replica> replicas = slice.getReplicas();
assertEquals("wrong number of "+collection+" shard"+ss+" replicas left, replicas="+replicas, numReplicas-rr, replicas.size());
assertEquals("wrong number of "+ COLLECTION +" shard"+ss+" replicas left, replicas="+replicas, numReplicas-rr, replicas.size());
}
}
@ -34,14 +34,14 @@ import org.apache.solr.response.SolrQueryResponse;

class SegmentTerminateEarlyTestState {

final String keyField = "id";
static final String KEY_FIELD = "id";

// for historic reasons, this is refered to as a "timestamp" field, but in actuallity is just an int
// value representing a number of "minutes" between 0-60.
// aka: I decided not to rename a million things while refactoring this test
public static final String timestampField = "timestamp_i_dvo";
public static final String oddField = "odd_l1"; // <dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>
public static final String quadField = "quad_l1"; // <dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>
public static final String TIMESTAMP_FIELD = "timestamp_i_dvo";
public static final String ODD_FIELD = "odd_l1"; // <dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>
public static final String QUAD_FIELD = "quad_l1"; // <dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>

final Set<Integer> minTimestampDocKeys = new HashSet<>();
final Set<Integer> maxTimestampDocKeys = new HashSet<>();

@ -63,7 +63,7 @@ class SegmentTerminateEarlyTestState {
++numDocs;
final Integer docKey = new Integer(numDocs);
SolrInputDocument doc = new SolrInputDocument();
doc.setField(keyField, ""+docKey);
doc.setField(KEY_FIELD, ""+docKey);
final int MM = rand.nextInt(60); // minutes
if (minTimestampMM == null || MM <= minTimestampMM.intValue()) {
if (minTimestampMM != null && MM < minTimestampMM.intValue()) {

@ -79,9 +79,9 @@ class SegmentTerminateEarlyTestState {
maxTimestampMM = new Integer(MM);
maxTimestampDocKeys.add(docKey);
}
doc.setField(timestampField, (Integer)MM);
doc.setField(oddField, ""+(numDocs % 2));
doc.setField(quadField, ""+(numDocs % 4)+1);
doc.setField(TIMESTAMP_FIELD, (Integer)MM);
doc.setField(ODD_FIELD, ""+(numDocs % 2));
doc.setField(QUAD_FIELD, ""+(numDocs % 4)+1);
cloudSolrClient.add(doc);
}
cloudSolrClient.commit();

@ -95,9 +95,9 @@ class SegmentTerminateEarlyTestState {
TestMiniSolrCloudCluster.assertFalse(maxTimestampDocKeys.isEmpty());
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0);
final Long oddFieldValue = new Long(maxTimestampDocKeys.iterator().next().intValue()%2);
final SolrQuery query = new SolrQuery(oddField+":"+oddFieldValue);
query.setSort(timestampField, SolrQuery.ORDER.desc);
query.setFields(keyField, oddField, timestampField);
final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue);
query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc);
query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD);
query.setRows(1);
// CommonParams.SEGMENT_TERMINATE_EARLY parameter intentionally absent
final QueryResponse rsp = cloudSolrClient.query(query);

@ -106,9 +106,9 @@ class SegmentTerminateEarlyTestState {
// check correctness of the first result
if (rsp.getResults().getNumFound() > 0) {
final SolrDocument solrDocument0 = rsp.getResults().get(0);
TestMiniSolrCloudCluster.assertTrue(keyField+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(keyField)));
TestMiniSolrCloudCluster.assertEquals(oddField, oddFieldValue, solrDocument0.getFieldValue(oddField));
TestMiniSolrCloudCluster.assertTrue(KEY_FIELD +" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(KEY_FIELD)));
TestMiniSolrCloudCluster.assertEquals(ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD));
}
// check segmentTerminatedEarly flag
TestMiniSolrCloudCluster.assertNull("responseHeader.segmentTerminatedEarly present in "+rsp.getResponseHeader(),

@ -119,9 +119,9 @@ class SegmentTerminateEarlyTestState {
TestMiniSolrCloudCluster.assertFalse(maxTimestampDocKeys.isEmpty());
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0);
final Long oddFieldValue = new Long(maxTimestampDocKeys.iterator().next().intValue()%2);
final SolrQuery query = new SolrQuery(oddField+":"+oddFieldValue);
query.setSort(timestampField, SolrQuery.ORDER.desc);
query.setFields(keyField, oddField, timestampField);
final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue);
query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc);
query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD);
final int rowsWanted = 1;
query.setRows(rowsWanted);
final Boolean shardsInfoWanted = (rand.nextBoolean() ? null : new Boolean(rand.nextBoolean()));

@ -136,9 +136,9 @@ class SegmentTerminateEarlyTestState {
// check correctness of the first result
if (rsp.getResults().getNumFound() > 0) {
final SolrDocument solrDocument0 = rsp.getResults().get(0);
TestMiniSolrCloudCluster.assertTrue(keyField+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(keyField)));
TestMiniSolrCloudCluster.assertEquals(oddField, oddFieldValue, rsp.getResults().get(0).getFieldValue(oddField));
TestMiniSolrCloudCluster.assertTrue(KEY_FIELD +" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(KEY_FIELD)));
TestMiniSolrCloudCluster.assertEquals(ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD));
}
// check segmentTerminatedEarly flag
TestMiniSolrCloudCluster.assertNotNull("responseHeader.segmentTerminatedEarly missing in "+rsp.getResponseHeader(),

@ -167,9 +167,9 @@ class SegmentTerminateEarlyTestState {
TestMiniSolrCloudCluster.assertFalse(maxTimestampDocKeys.isEmpty());
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0);
final Long oddFieldValue = new Long(maxTimestampDocKeys.iterator().next().intValue()%2);
final SolrQuery query = new SolrQuery(oddField+":"+oddFieldValue);
query.setSort(timestampField, SolrQuery.ORDER.desc);
query.setFields(keyField, oddField, timestampField);
final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue);
query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc);
query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD);
query.setRows(1);
final Boolean shardsInfoWanted = (rand.nextBoolean() ? null : new Boolean(rand.nextBoolean()));
if (shardsInfoWanted != null) {

@ -182,9 +182,9 @@ class SegmentTerminateEarlyTestState {
// check correctness of the first result
if (rsp.getResults().getNumFound() > 0) {
final SolrDocument solrDocument0 = rsp.getResults().get(0);
TestMiniSolrCloudCluster.assertTrue(keyField+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(keyField)));
TestMiniSolrCloudCluster.assertEquals(oddField, oddFieldValue, rsp.getResults().get(0).getFieldValue(oddField));
TestMiniSolrCloudCluster.assertTrue(KEY_FIELD +" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument0.getFieldValue(KEY_FIELD)));
TestMiniSolrCloudCluster.assertEquals(ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD));
}
// check segmentTerminatedEarly flag
TestMiniSolrCloudCluster.assertNull("responseHeader.segmentTerminatedEarly present in "+rsp.getResponseHeader(),

@ -212,13 +212,13 @@ class SegmentTerminateEarlyTestState {
TestMiniSolrCloudCluster.assertFalse(maxTimestampDocKeys.isEmpty());
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0);
final Long oddFieldValue = new Long(maxTimestampDocKeys.iterator().next().intValue()%2);
final SolrQuery query = new SolrQuery(oddField+":"+oddFieldValue);
query.setSort(timestampField, SolrQuery.ORDER.desc);
query.setFields(keyField, oddField, timestampField);
final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue);
query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc);
query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD);
query.setRows(1);
query.set(CommonParams.SEGMENT_TERMINATE_EARLY, true);
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not quad-able", (numDocs%4)==0);
query.add("group.field", quadField);
query.add("group.field", QUAD_FIELD);
query.set("group", true);
final QueryResponse rsp = cloudSolrClient.query(query);
// check correctness of the results count

@ -226,9 +226,9 @@ class SegmentTerminateEarlyTestState {
// check correctness of the first result
if (rsp.getGroupResponse().getValues().get(0).getMatches() > 0) {
final SolrDocument solrDocument = rsp.getGroupResponse().getValues().get(0).getValues().get(0).getResult().get(0);
TestMiniSolrCloudCluster.assertTrue(keyField+" of ("+solrDocument+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument.getFieldValue(keyField)));
TestMiniSolrCloudCluster.assertEquals(oddField, oddFieldValue, solrDocument.getFieldValue(oddField));
TestMiniSolrCloudCluster.assertTrue(KEY_FIELD +" of ("+solrDocument+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")",
maxTimestampDocKeys.contains(solrDocument.getFieldValue(KEY_FIELD)));
TestMiniSolrCloudCluster.assertEquals(ODD_FIELD, oddFieldValue, solrDocument.getFieldValue(ODD_FIELD));
}
// check segmentTerminatedEarly flag
// at present segmentTerminateEarly cannot be used with grouped queries

@ -240,9 +240,9 @@ class SegmentTerminateEarlyTestState {
TestMiniSolrCloudCluster.assertFalse(minTimestampDocKeys.isEmpty());
TestMiniSolrCloudCluster.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0);
final Long oddFieldValue = new Long(minTimestampDocKeys.iterator().next().intValue()%2);
final SolrQuery query = new SolrQuery(oddField+":"+oddFieldValue);
query.setSort(timestampField, SolrQuery.ORDER.asc); // a sort order that is _not_ compatible with the merge sort order
query.setFields(keyField, oddField, timestampField);
final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue);
query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.asc); // a sort order that is _not_ compatible with the merge sort order
query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD);
query.setRows(1);
query.set(CommonParams.SEGMENT_TERMINATE_EARLY, true);
final QueryResponse rsp = cloudSolrClient.query(query);

@ -251,9 +251,9 @@ class SegmentTerminateEarlyTestState {
// check correctness of the first result
if (rsp.getResults().getNumFound() > 0) {
final SolrDocument solrDocument0 = rsp.getResults().get(0);
TestMiniSolrCloudCluster.assertTrue(keyField+" of ("+solrDocument0+") is not in minTimestampDocKeys("+minTimestampDocKeys+")",
minTimestampDocKeys.contains(solrDocument0.getFieldValue(keyField)));
TestMiniSolrCloudCluster.assertEquals(oddField, oddFieldValue, solrDocument0.getFieldValue(oddField));
TestMiniSolrCloudCluster.assertTrue(KEY_FIELD +" of ("+solrDocument0+") is not in minTimestampDocKeys("+minTimestampDocKeys+")",
minTimestampDocKeys.contains(solrDocument0.getFieldValue(KEY_FIELD)));
TestMiniSolrCloudCluster.assertEquals(ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD));
}
// check segmentTerminatedEarly flag
TestMiniSolrCloudCluster.assertNotNull("responseHeader.segmentTerminatedEarly missing in "+rsp.getResponseHeader(),
@ -163,7 +163,7 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {

// Just a random port, I'm not going to use it but just check that the Solr instance constructed from the XML
// file in ZK overrides the default port.
private final String XML_FOR_ZK =
private static final String XML_FOR_ZK =
"<solr>" +
" <solrcloud>" +
" <str name=\"host\">127.0.0.1</str>" +
@ -123,7 +123,7 @@ public class TestSegmentSorting extends SolrCloudTestCase {
public void testAtomicUpdateOfSegmentSortField() throws Exception {

final CloudSolrClient cloudSolrClient = cluster.getSolrClient();
final String updateField = SegmentTerminateEarlyTestState.timestampField;
final String updateField = SegmentTerminateEarlyTestState.TIMESTAMP_FIELD;

// sanity check that updateField is in fact a DocValues only field, meaning it
// would normally be eligable for inplace updates -- if it weren't also used for merge sorting
@ -66,7 +66,7 @@ public class OpenCloseCoreStressTest extends SolrTestCaseJ4 {
final int indexingThreads = TEST_NIGHTLY ? 9 : 5;
final int queryThreads = TEST_NIGHTLY ? 9 : 5;

final int resetInterval = 30 * 60; // minutes to report then delete everything
static final int RESET_INTERVAL = 30 * 60; // minutes to report then delete everything
long cumulativeDocs = 0;

String url;

@ -165,7 +165,7 @@ public class OpenCloseCoreStressTest extends SolrTestCaseJ4 {
int secondsRemaining = secondsToRun;
do {

int cycleSeconds = Math.min(resetInterval, secondsRemaining);
int cycleSeconds = Math.min(RESET_INTERVAL, secondsRemaining);
log.info(String.format(Locale.ROOT, "\n\n\n\n\nStarting a %,d second cycle, seconds left: %,d. Seconds run so far: %,d.",
cycleSeconds, secondsRemaining, secondsRun));

@ -177,7 +177,7 @@ public class OpenCloseCoreStressTest extends SolrTestCaseJ4 {

queries.waitOnThreads();

secondsRemaining = Math.max(secondsRemaining - resetInterval, 0);
secondsRemaining = Math.max(secondsRemaining - RESET_INTERVAL, 0);

checkResults(queryingClients.get(0), queries, idxer);
@ -62,12 +62,12 @@ public class SpatialRPTFieldTypeTest extends AbstractBadConfigTestBase {
System.clearProperty("managed.schema.mutable");
System.clearProperty("enable.update.log");
}

final String INDEXED_COORDINATES = "25,82";
final String QUERY_COORDINATES = "24,81";
final String DISTANCE_DEGREES = "1.3520328";
final String DISTANCE_KILOMETERS = "150.33939";
final String DISTANCE_MILES = "93.416565";

static final String INDEXED_COORDINATES = "25,82";
static final String QUERY_COORDINATES = "24,81";
static final String DISTANCE_DEGREES = "1.3520328";
static final String DISTANCE_KILOMETERS = "150.33939";
static final String DISTANCE_MILES = "93.416565";

public void testDistanceUnitsDegrees() throws Exception {
setupRPTField("degrees", "true");
@ -47,7 +47,7 @@ public class TestRTGBase extends SolrTestCaseJ4 {
protected long snapshotCount;
protected long committedModelClock;
protected volatile int lastId;
protected final String field = "val_l";
protected static final String FIELD = "val_l";
protected Object[] syncArr;

protected Object globalLock = this;
@ -667,7 +667,7 @@ public class TestRealTimeGet extends TestRTGBase {
}

Long version = null;
SolrInputDocument sd = sdoc("id", Integer.toString(id), field, Long.toString(nextVal));
SolrInputDocument sd = sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal));

if (opt) {
if (correct) {

@ -762,7 +762,7 @@ public class TestRealTimeGet extends TestRTGBase {
// This is also correct when filteredOut==true
} else {
assertEquals(1, doclist.size());
long foundVal = (Long)(((Map)doclist.get(0)).get(field));
long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD));
long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
if (filteredOut || foundVal < Math.abs(info.val)
|| (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must
@ -197,7 +197,7 @@ public class TestReloadDeadlock extends TestRTGBase {
private void addDoc(int id, long nextVal, long version) throws Exception {
ifVerbose("adding id", id, "val=", nextVal, "version", version);

Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal),
Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal),
"_version_", Long.toString(version)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
if (returnedVersion != null) {
assertEquals(version, returnedVersion.longValue());
@ -226,7 +226,7 @@ public class TestStressLucene extends TestRTGBase {
if (tombstones) {
Document d = new Document();
d.add(new Field("id","-"+Integer.toString(id), idFt));
d.add(new Field(field, Long.toString(nextVal), ft2));
d.add(new Field(FIELD, Long.toString(nextVal), ft2));
verbose("adding tombstone for id",id,"val=",nextVal);
writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
}

@ -243,7 +243,7 @@ public class TestStressLucene extends TestRTGBase {
if (tombstones) {
Document d = new Document();
d.add(new Field("id","-"+Integer.toString(id), idFt));
d.add(new Field(field, Long.toString(nextVal), ft2));
d.add(new Field(FIELD, Long.toString(nextVal), ft2));
verbose("adding tombstone for id",id,"val=",nextVal);
writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
}

@ -258,7 +258,7 @@ public class TestStressLucene extends TestRTGBase {
// assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
Document d = new Document();
d.add(new Field("id",Integer.toString(id), idFt));
d.add(new Field(field, Long.toString(nextVal), ft2));
d.add(new Field(FIELD, Long.toString(nextVal), ft2));
verbose("adding id",id,"val=",nextVal);
writer.updateDocument(new Term("id", Integer.toString(id)), d);
if (tombstones) {

@ -337,7 +337,7 @@ public class TestStressLucene extends TestRTGBase {
}
assertTrue(docid >= 0); // we should have found the document, or its tombstone
Document doc = r.document(docid);
long foundVal = Long.parseLong(doc.get(field));
long foundVal = Long.parseLong(doc.get(FIELD));
if (foundVal < Math.abs(val)) {
verbose("ERROR: id",id,"model_val=",val," foundVal=",foundVal,"reader=",reader);
}
@ -228,7 +228,7 @@ public class TestStressRecovery extends TestRTGBase {
} else {
verbose("adding id", id, "val=", nextVal,"version",version);

Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
if (returnedVersion != null) {
assertEquals(version, returnedVersion.longValue());
}

@ -310,7 +310,7 @@ public class TestStressRecovery extends TestRTGBase {
// there's no info we can get back with a delete, so not much we can check without further synchronization
} else {
assertEquals(1, doclist.size());
long foundVal = (Long)(((Map)doclist.get(0)).get(field));
long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD));
long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
if (foundVer < Math.abs(info.version)
|| (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must
@ -223,7 +223,7 @@ public class TestStressReorder extends TestRTGBase {
} else {
verbose("adding id", id, "val=", nextVal,"version",version);

Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER));
if (returnedVersion != null) {
assertEquals(version, returnedVersion.longValue());
}

@ -301,7 +301,7 @@ public class TestStressReorder extends TestRTGBase {
// there's no info we can get back with a delete, so not much we can check without further synchronization
} else {
assertEquals(1, doclist.size());
long foundVal = (Long)(((Map)doclist.get(0)).get(field));
long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD));
long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
if (foundVer < Math.abs(info.version)
|| (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must
@ -203,7 +203,7 @@ public class TestStressUserVersions extends TestRTGBase {
} else {
verbose("adding id", id, "val=", nextVal,"version",version);

Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal), vfield, Long.toString(version)), null);
Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), vfield, Long.toString(version)), null);

// only update model if the version is newer
synchronized (model) {

@ -282,7 +282,7 @@ public class TestStressUserVersions extends TestRTGBase {
long foundVer = (Long)(((Map)doclist.get(0)).get(vfield));

if (isLive) {
long foundVal = (Long)(((Map)doclist.get(0)).get(field));
long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD));
if (foundVer < Math.abs(info.version)
|| (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must
log.error("ERROR, id=" + id + " found=" + response + " model" + info);

@ -290,7 +290,7 @@ public class TestStressUserVersions extends TestRTGBase {
}
} else {
// if the doc is deleted (via tombstone), it shouldn't have a value on it.
assertNull( ((Map)doclist.get(0)).get(field) );
assertNull( ((Map)doclist.get(0)).get(FIELD) );

if (foundVer < Math.abs(info.version)) {
log.error("ERROR, id=" + id + " found=" + response + " model" + info);
@ -172,7 +172,7 @@ public class TestStressVersions extends TestRTGBase {
verbose("adding id", id, "val=", nextVal);

// assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
Long version = addAndGetVersion(sdoc("id", Integer.toString(id), field, Long.toString(nextVal)), null);
Long version = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal)), null);
assertTrue(version > 0);

// only update model if the version is newer

@ -247,7 +247,7 @@ public class TestStressVersions extends TestRTGBase {
// there's no info we can get back with a delete, so not much we can check without further synchronization
} else {
assertEquals(1, doclist.size());
long foundVal = (Long)(((Map)doclist.get(0)).get(field));
long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD));
long foundVer = (Long)(((Map)doclist.get(0)).get("_version_"));
if (foundVer < Math.abs(info.version)
|| (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must
@ -47,10 +47,10 @@ import org.noggit.CharArr;
public class TestJavaBinCodec extends SolrTestCaseJ4 {

private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN = "/solrj/javabin_backcompat.bin";
private final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin";
private static final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin";

private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS = "/solrj/javabin_backcompat_child_docs.bin";
private final String BIN_FILE_LOCATION_CHILD_DOCS = "./solr/solrj/src/test-files/solrj/javabin_backcompat_child_docs.bin";
private static final String BIN_FILE_LOCATION_CHILD_DOCS = "./solr/solrj/src/test-files/solrj/javabin_backcompat_child_docs.bin";

public void testStrings() throws Exception {
for (int i = 0; i < 10000 * RANDOM_MULTIPLIER; i++) {