mirror of https://github.com/apache/lucene.git
Replace println(String.format(...)) with printf(...) (#12976)
This commit is contained in:
parent 57b104e806, commit 91272f45da
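Replacing println(String.format(...)) with printf(...) is behavior-preserving: PrintStream.printf(Locale, String, Object...) formats and writes in one call, and a trailing %n in the format string emits the platform line separator, the same terminator println used to append. A minimal sketch of the pattern (the sample values are hypothetical, not taken from the patch):

    import java.util.Locale;

    public class PrintfDemo {
      public static void main(String[] args) {
        String codecName = "Lucene99"; // hypothetical sample value
        int iter = 3;

        // Before: build the formatted String, then print it plus a newline.
        System.out.println(String.format(Locale.ROOT, "codec: %s, iter: %d", codecName, iter));

        // After: printf formats and writes directly; the trailing %n stands in
        // for the line separator that println appended.
        System.out.printf(Locale.ROOT, "codec: %s, iter: %d%n", codecName, iter);
      }
    }

Both statements print identical output; the printf form skips the intermediate String. The commit folds in a few other mechanical cleanups visible below: anonymous Comparator, Runnable, and Callable classes become lambdas, toArray(new T[size]) becomes toArray(new T[0]), and explicit generic arguments collapse to the diamond operator.
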
@@ -43,7 +43,7 @@ import org.junit.runner.RunWith;
 @RunWith(RandomizedRunner.class)
 public class TestCodecLoadingDeadlock extends Assert {
-  private static int MAX_TIME_SECONDS = 30;
+  private static final int MAX_TIME_SECONDS = 30;

   @Test
   public void testDeadlock() throws Exception {
@@ -59,8 +59,7 @@ public class TestCodecLoadingDeadlock extends Assert {
         new ArrayList<>(avail = DocValuesFormat.availableDocValuesFormats())
             .get(rnd.nextInt(avail.size()));

-    System.out.println(
-        String.format(Locale.ROOT, "codec: %s, pf: %s, dvf: %s", codecName, pfName, dvfName));
+    System.out.printf(Locale.ROOT, "codec: %s, pf: %s, dvf: %s%n", codecName, pfName, dvfName);

     List<String> args = new ArrayList<>();
     args.add(Paths.get(System.getProperty("java.home"), "bin", "java").toString());

@@ -19,7 +19,6 @@ package org.apache.lucene.search.grouping;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -185,8 +184,7 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
     int numberOfRuns = atLeast(1);
     for (int iter = 0; iter < numberOfRuns; iter++) {
       if (VERBOSE) {
-        System.out.println(
-            String.format(Locale.ROOT, "TEST: iter=%d total=%d", iter, numberOfRuns));
+        System.out.printf(Locale.ROOT, "TEST: iter=%d total=%d%n", iter, numberOfRuns);
       }

       final int numDocs = TestUtil.nextInt(random(), 100, 1000) * RANDOM_MULTIPLIER;
@@ -205,7 +203,7 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
           // For that reason we don't generate empty string groups.
           randomValue = TestUtil.randomRealisticUnicodeString(random());
           // randomValue = TestUtil.randomSimpleString(random());
-        } while ("".equals(randomValue));
+        } while (randomValue.isEmpty());
         groups.add(new BytesRef(randomValue));
       }
       final String[] contentStrings = new String[TestUtil.nextInt(random(), 2, 20)];
@@ -221,7 +219,7 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
         }
         contentStrings[contentIDX] = sb.toString();
         if (VERBOSE) {
-          System.out.println("  content=" + sb.toString());
+          System.out.println("  content=" + sb);
         }
       }

@@ -231,8 +229,7 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {

       Document doc = new Document();
       Document docNoGroup = new Document();
-      Field valuesField = null;
-      valuesField = new SortedDocValuesField("group", new BytesRef());
+      Field valuesField = new SortedDocValuesField("group", new BytesRef());
       doc.add(valuesField);
       Field sort1 = new SortedDocValuesField("sort1", new BytesRef());
       doc.add(sort1);
@@ -373,32 +370,30 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
           GroupDoc expectedGroupDoc = groupDocs[expectedDocId];
           String expectedGroup =
               expectedGroupDoc.group == null ? null : expectedGroupDoc.group.utf8ToString();
-          System.out.println(
-              String.format(
-                  Locale.ROOT,
-                  "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d",
-                  expectedGroup,
-                  expectedGroupDoc.score,
-                  expectedGroupDoc.sort1.utf8ToString(),
-                  expectedGroupDoc.sort2.utf8ToString(),
-                  expectedGroupDoc.sort3.utf8ToString(),
-                  expectedDocId));
+          System.out.printf(
+              Locale.ROOT,
+              "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d%n",
+              expectedGroup,
+              expectedGroupDoc.score,
+              expectedGroupDoc.sort1.utf8ToString(),
+              expectedGroupDoc.sort2.utf8ToString(),
+              expectedGroupDoc.sort3.utf8ToString(),
+              expectedDocId);
         }
         System.out.println("\n=== Actual: \n");
         for (int actualDocId : actualGroupHeads) {
           GroupDoc actualGroupDoc = groupDocs[actualDocId];
           String actualGroup =
               actualGroupDoc.group == null ? null : actualGroupDoc.group.utf8ToString();
-          System.out.println(
-              String.format(
-                  Locale.ROOT,
-                  "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d",
-                  actualGroup,
-                  actualGroupDoc.score,
-                  actualGroupDoc.sort1.utf8ToString(),
-                  actualGroupDoc.sort2.utf8ToString(),
-                  actualGroupDoc.sort3.utf8ToString(),
-                  actualDocId));
+          System.out.printf(
+              Locale.ROOT,
+              "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d%n",
+              actualGroup,
+              actualGroupDoc.score,
+              actualGroupDoc.sort1.utf8ToString(),
+              actualGroupDoc.sort2.utf8ToString(),
+              actualGroupDoc.sort3.utf8ToString(),
+              actualDocId);
         }
         System.out.println(
             "\n===================================================================================");
@@ -487,7 +482,7 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
     int i = 0;
     for (BytesRef groupValue : groupHeads.keySet()) {
       List<GroupDoc> docs = groupHeads.get(groupValue);
-      Collections.sort(docs, getComparator(docSort, sortByScoreOnly, fieldIdToDocID));
+      docs.sort(getComparator(docSort, sortByScoreOnly, fieldIdToDocID));
       allGroupHeads[i++] = docs.get(0).id;
     }

@@ -516,47 +511,43 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase {
     } else if (!scoreOnly) {
       sortFields.add(new SortField("id", SortField.Type.INT));
     }
-    return new Sort(sortFields.toArray(new SortField[sortFields.size()]));
+    return new Sort(sortFields.toArray(new SortField[0]));
   }

   private Comparator<GroupDoc> getComparator(
       Sort sort, final boolean sortByScoreOnly, final int[] fieldIdToDocID) {
     final SortField[] sortFields = sort.getSort();
-    return new Comparator<GroupDoc>() {
-      @Override
-      public int compare(GroupDoc d1, GroupDoc d2) {
-        for (SortField sf : sortFields) {
-          final int cmp;
-          if (sf.getType() == SortField.Type.SCORE) {
-            if (d1.score > d2.score) {
-              cmp = -1;
-            } else if (d1.score < d2.score) {
-              cmp = 1;
-            } else {
-              cmp = sortByScoreOnly ? fieldIdToDocID[d1.id] - fieldIdToDocID[d2.id] : 0;
-            }
-          } else if (sf.getField().equals("sort1")) {
-            cmp = d1.sort1.compareTo(d2.sort1);
-          } else if (sf.getField().equals("sort2")) {
-            cmp = d1.sort2.compareTo(d2.sort2);
-          } else if (sf.getField().equals("sort3")) {
-            cmp = d1.sort3.compareTo(d2.sort3);
-          } else {
-            assertEquals(sf.getField(), "id");
-            cmp = d1.id - d2.id;
-          }
-          if (cmp != 0) {
-            return sf.getReverse() ? -cmp : cmp;
-          }
-        }
-        // Our sort always fully tie breaks:
-        fail();
-        return 0;
-      }
+    return (d1, d2) -> {
+      for (SortField sf : sortFields) {
+        final int cmp;
+        if (sf.getType() == SortField.Type.SCORE) {
+          if (d1.score > d2.score) {
+            cmp = -1;
+          } else if (d1.score < d2.score) {
+            cmp = 1;
+          } else {
+            cmp = sortByScoreOnly ? fieldIdToDocID[d1.id] - fieldIdToDocID[d2.id] : 0;
+          }
+        } else if (sf.getField().equals("sort1")) {
+          cmp = d1.sort1.compareTo(d2.sort1);
+        } else if (sf.getField().equals("sort2")) {
+          cmp = d1.sort2.compareTo(d2.sort2);
+        } else if (sf.getField().equals("sort3")) {
+          cmp = d1.sort3.compareTo(d2.sort3);
+        } else {
+          assertEquals(sf.getField(), "id");
+          cmp = d1.id - d2.id;
+        }
+        if (cmp != 0) {
+          return sf.getReverse() ? -cmp : cmp;
+        }
+      }
+      // Our sort always fully tie breaks:
+      fail();
+      return 0;
     };
   }

   @SuppressWarnings({"unchecked", "rawtypes"})
   private AllGroupHeadsCollector<?> createRandomCollector(String groupField, Sort sortWithinGroup) {
     if (random().nextBoolean()) {
       ValueSource vs = new BytesRefFieldSource(groupField);

@@ -19,7 +19,6 @@ package org.apache.lucene.search.grouping;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -453,13 +452,12 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {
       System.out.println("Total missing count " + expectedFacetResult.getTotalMissingCount());
       int counter = 0;
       for (TermGroupFacetCollector.FacetEntry expectedFacetEntry : expectedFacetEntries) {
-        System.out.println(
-            String.format(
-                Locale.ROOT,
-                "%d. Expected facet value %s with count %d",
-                counter++,
-                expectedFacetEntry.getValue().utf8ToString(),
-                expectedFacetEntry.getCount()));
+        System.out.printf(
+            Locale.ROOT,
+            "%d. Expected facet value %s with count %d%n",
+            counter++,
+            expectedFacetEntry.getValue().utf8ToString(),
+            expectedFacetEntry.getCount());
       }

       System.out.println("\n=== Actual: \n");
@@ -467,13 +465,12 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {
       System.out.println("Total missing count " + actualFacetResult.getTotalMissingCount());
       counter = 0;
       for (TermGroupFacetCollector.FacetEntry actualFacetEntry : actualFacetEntries) {
-        System.out.println(
-            String.format(
-                Locale.ROOT,
-                "%d. Actual facet value %s with count %d",
-                counter++,
-                actualFacetEntry.getValue().utf8ToString(),
-                actualFacetEntry.getCount()));
+        System.out.printf(
+            Locale.ROOT,
+            "%d. Actual facet value %s with count %d%n",
+            counter++,
+            actualFacetEntry.getValue().utf8ToString(),
+            actualFacetEntry.getCount());
       }
       System.out.println(
           "\n===================================================================================");
@@ -581,19 +578,15 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {

     NavigableSet<String> uniqueFacetValues =
         new TreeSet<>(
-            new Comparator<String>() {
-
-              @Override
-              public int compare(String a, String b) {
-                if (a == b) {
-                  return 0;
-                } else if (a == null) {
-                  return -1;
-                } else if (b == null) {
-                  return 1;
-                } else {
-                  return a.compareTo(b);
-                }
-              }
+            (a, b) -> {
+              if (a == b) {
+                return 0;
+              } else if (a == null) {
+                return -1;
+              } else if (b == null) {
+                return 1;
+              } else {
+                return a.compareTo(b);
+              }
             });
     Map<String, Map<String, Set<String>>> searchTermToFacetToGroups = new HashMap<>();
@@ -610,7 +603,7 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {

       String contentStr = contentBrs[random.nextInt(contentBrs.length)];
       if (!searchTermToFacetToGroups.containsKey(contentStr)) {
-        searchTermToFacetToGroups.put(contentStr, new HashMap<String, Set<String>>());
+        searchTermToFacetToGroups.put(contentStr, new HashMap<>());
       }
       Map<String, Set<String>> facetToGroups = searchTermToFacetToGroups.get(contentStr);

@@ -619,7 +612,7 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {
         String facetValue = facetValues.get(random.nextInt(facetValues.size()));
         uniqueFacetValues.add(facetValue);
         if (!facetToGroups.containsKey(facetValue)) {
-          facetToGroups.put(facetValue, new HashSet<String>());
+          facetToGroups.put(facetValue, new HashSet<>());
         }
         Set<String> groupsInFacet = facetToGroups.get(facetValue);
         groupsInFacet.add(groupValue);
@@ -634,7 +627,7 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {
         String facetValue = facetValues.get(random.nextInt(facetValues.size()));
         uniqueFacetValues.add(facetValue);
         if (!facetToGroups.containsKey(facetValue)) {
-          facetToGroups.put(facetValue, new HashSet<String>());
+          facetToGroups.put(facetValue, new HashSet<>());
         }
         Set<String> groupsInFacet = facetToGroups.get(facetValue);
         groupsInFacet.add(groupValue);
@@ -740,21 +733,15 @@ public class TestGroupFacetCollector extends AbstractGroupingTestCase {
       }
     }

-    Collections.sort(
-        entries,
-        new Comparator<TermGroupFacetCollector.FacetEntry>() {
-
-          @Override
-          public int compare(
-              TermGroupFacetCollector.FacetEntry a, TermGroupFacetCollector.FacetEntry b) {
-            if (orderByCount) {
-              int cmp = b.getCount() - a.getCount();
-              if (cmp != 0) {
-                return cmp;
-              }
-            }
-            return a.getValue().compareTo(b.getValue());
-          }
+    entries.sort(
+        (a, b) -> {
+          if (orderByCount) {
+            int cmp = b.getCount() - a.getCount();
+            if (cmp != 0) {
+              return cmp;
+            }
+          }
+          return a.getValue().compareTo(b.getValue());
         });

     int endOffset = offset + limit;

@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -195,7 +194,7 @@ public class TestJoinUtil extends LuceneTestCase {
   public void testSimpleOrdinalsJoin() throws Exception {
     final String idField = "id";
     final String productIdField = "productId";
-    // A field indicating to what type a document belongs, which is then used to distinques between
+    // A field indicating to what type a document belongs, which is then used to distinguish between
     // documents during joining.
     final String typeField = "type";
     // A single sorted doc values field that holds the join values for all document types.
@@ -310,7 +309,7 @@ public class TestJoinUtil extends LuceneTestCase {
   public void testOrdinalsJoinExplainNoMatches() throws Exception {
     final String idField = "id";
     final String productIdField = "productId";
-    // A field indicating to what type a document belongs, which is then used to distinques between
+    // A field indicating to what type a document belongs, which is then used to distinguish between
     // documents during joining.
     final String typeField = "type";
     // A single sorted doc values field that holds the join values for all document types.
@@ -1496,7 +1495,7 @@ public class TestJoinUtil extends LuceneTestCase {
     final Query joinQuery;
     {
       // single val can be handled by multiple-vals
-      final boolean muliValsQuery = multipleValuesPerDocument || random().nextBoolean();
+      final boolean multiValsQuery = multipleValuesPerDocument || random().nextBoolean();
       final String fromField = from ? "from" : "to";
       final String toField = from ? "to" : "from";

@@ -1521,7 +1520,7 @@ public class TestJoinUtil extends LuceneTestCase {
           joinQuery =
               JoinUtil.createJoinQuery(
                   fromField + suffix,
-                  muliValsQuery,
+                  multiValsQuery,
                   toField + suffix,
                   numType,
                   actualQuery,
@@ -1531,7 +1530,7 @@ public class TestJoinUtil extends LuceneTestCase {
         case 1:
           joinQuery =
               JoinUtil.createJoinQuery(
-                  fromField, muliValsQuery, toField, actualQuery, indexSearcher, scoreMode);
+                  fromField, multiValsQuery, toField, actualQuery, indexSearcher, scoreMode);
           break;
         default:
           throw new RuntimeException("unexpected value " + surpriseMe);
@@ -1568,24 +1567,22 @@ public class TestJoinUtil extends LuceneTestCase {
         for (int doc = iterator.nextDoc();
             doc != DocIdSetIterator.NO_MORE_DOCS;
             doc = iterator.nextDoc()) {
-          System.out.println(
-              String.format(
-                  Locale.ROOT,
-                  "Expected doc[%d] with id value %s",
-                  doc,
-                  indexSearcher.storedFields().document(doc).get("id")));
+          System.out.printf(
+              Locale.ROOT,
+              "Expected doc[%d] with id value %s%n",
+              doc,
+              indexSearcher.storedFields().document(doc).get("id"));
         }
         System.out.println("actual cardinality:" + actualResult.cardinality());
         iterator = new BitSetIterator(actualResult, actualResult.cardinality());
         for (int doc = iterator.nextDoc();
             doc != DocIdSetIterator.NO_MORE_DOCS;
             doc = iterator.nextDoc()) {
-          System.out.println(
-              String.format(
-                  Locale.ROOT,
-                  "Actual doc[%d] with id value %s",
-                  doc,
-                  indexSearcher.storedFields().document(doc).get("id")));
+          System.out.printf(
+              Locale.ROOT,
+              "Actual doc[%d] with id value %s%n",
+              doc,
+              indexSearcher.storedFields().document(doc).get("id"));
         }
       }
       assertEquals(expectedResult, actualResult);
@@ -1661,7 +1658,7 @@ public class TestJoinUtil extends LuceneTestCase {
       final int nextInt = random.nextInt(0xFFFFFF);
       uniqueRandomValue = String.format(Locale.ROOT, "%08x", nextInt);
       assert nextInt == Integer.parseUnsignedInt(uniqueRandomValue, 16);
-    } while ("".equals(uniqueRandomValue) || trackSet.contains(uniqueRandomValue));
+    } while (uniqueRandomValue.isEmpty() || trackSet.contains(uniqueRandomValue));

     // Generate unique values and empty strings aren't allowed.
     trackSet.add(uniqueRandomValue);
@@ -1867,8 +1864,7 @@ public class TestJoinUtil extends LuceneTestCase {
     Terms terms = MultiTerms.getTerms(topLevelReader, toField);
     if (terms != null) {
       PostingsEnum postingsEnum = null;
-      SortedSet<BytesRef> joinValues = new TreeSet<>();
-      joinValues.addAll(joinValueToJoinScores.keySet());
+      SortedSet<BytesRef> joinValues = new TreeSet<>(joinValueToJoinScores.keySet());
       for (BytesRef joinValue : joinValues) {
         TermsEnum termsEnum = terms.iterator();
         if (termsEnum.seekExact(joinValue)) {
@@ -1993,22 +1989,16 @@ public class TestJoinUtil extends LuceneTestCase {
         hitsToJoinScores = context.toHitsToJoinScore.get(queryValue);
       }
       List<Map.Entry<Integer, JoinScore>> hits = new ArrayList<>(hitsToJoinScores.entrySet());
-      Collections.sort(
-          hits,
-          new Comparator<Map.Entry<Integer, JoinScore>>() {
-
-            @Override
-            public int compare(
-                Map.Entry<Integer, JoinScore> hit1, Map.Entry<Integer, JoinScore> hit2) {
-              float score1 = hit1.getValue().score(scoreMode);
-              float score2 = hit2.getValue().score(scoreMode);
-
-              int cmp = Float.compare(score2, score1);
-              if (cmp != 0) {
-                return cmp;
-              }
-              return hit1.getKey() - hit2.getKey();
-            }
+      hits.sort(
+          (hit1, hit2) -> {
+            float score1 = hit1.getValue().score(scoreMode);
+            float score2 = hit2.getValue().score(scoreMode);
+
+            int cmp = Float.compare(score2, score1);
+            if (cmp != 0) {
+              return cmp;
+            }
+            return hit1.getKey() - hit2.getKey();
           });
       ScoreDoc[] scoreDocs = new ScoreDoc[Math.min(10, hits.size())];
       for (int i = 0; i < scoreDocs.length; i++) {

@@ -72,7 +72,7 @@ public abstract class Node implements Closeable {
   protected ReferenceManager<IndexSearcher> mgr;

   /**
-   * Startup time of original test, carefully propogated to all nodes to produce consistent "seconds
+   * Startup time of original test, carefully propagated to all nodes to produce consistent "seconds
    * since start time" in messages
    */
   public static long globalStartNS;
@@ -118,45 +118,42 @@ public abstract class Node implements Closeable {
   public static void nodeMessage(PrintStream printStream, String message) {
     if (printStream != null) {
       long now = System.nanoTime();
-      printStream.println(
-          String.format(
-              Locale.ROOT,
-              "%5.3fs %5.1fs: [%11s] %s",
-              (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              Thread.currentThread().getName(),
-              message));
+      printStream.printf(
+          Locale.ROOT,
+          "%5.3fs %5.1fs: [%11s] %s%n",
+          (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          Thread.currentThread().getName(),
+          message);
     }
   }

   public static void nodeMessage(PrintStream printStream, int id, String message) {
     if (printStream != null) {
       long now = System.nanoTime();
-      printStream.println(
-          String.format(
-              Locale.ROOT,
-              "%5.3fs %5.1fs: N%d [%11s] %s",
-              (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              id,
-              Thread.currentThread().getName(),
-              message));
+      printStream.printf(
+          Locale.ROOT,
+          "%5.3fs %5.1fs: N%d [%11s] %s%n",
+          (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          id,
+          Thread.currentThread().getName(),
+          message);
     }
   }

   public void message(String message) {
     if (printStream != null) {
       long now = System.nanoTime();
-      printStream.println(
-          String.format(
-              Locale.ROOT,
-              "%5.3fs %5.1fs: %7s %2s [%11s] %s",
-              (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-              state,
-              name(),
-              Thread.currentThread().getName(),
-              message));
+      printStream.printf(
+          Locale.ROOT,
+          "%5.3fs %5.1fs: %7s %2s [%11s] %s%n",
+          (now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+          state,
+          name(),
+          Thread.currentThread().getName(),
+          message);
     }
   }

@@ -130,7 +130,7 @@ public class TestNRTReplication extends LuceneTestCase {
     int tcpPort = -1;
     long initCommitVersion = -1;
     long initInfosVersion = -1;
-    Pattern logTimeStart = Pattern.compile("^[0-9\\.]+s .*");
+    Pattern logTimeStart = Pattern.compile("^[0-9.]+s .*");

     while (true) {
       String l = r.readLine();
@@ -173,24 +173,21 @@ public class TestNRTReplication extends LuceneTestCase {
     AtomicBoolean nodeClosing = new AtomicBoolean();
     Thread pumper =
         ThreadPumper.start(
-            new Runnable() {
-              @Override
-              public void run() {
-                message("now wait for process " + p);
-                try {
-                  p.waitFor();
-                } catch (Throwable t) {
-                  throw new RuntimeException(t);
-                }
-
-                message("done wait for process " + p);
-                int exitValue = p.exitValue();
-                message("exit value=" + exitValue + " willCrash=" + finalWillCrash);
-                if (exitValue != 0 && finalWillCrash == false) {
-                  // should fail test
-                  throw new RuntimeException(
-                      "node " + id + " process had unexpected non-zero exit status=" + exitValue);
-                }
-              }
+            () -> {
+              message("now wait for process " + p);
+              try {
+                p.waitFor();
+              } catch (Throwable t) {
+                throw new RuntimeException(t);
+              }
+
+              message("done wait for process " + p);
+              int exitValue = p.exitValue();
+              message("exit value=" + exitValue + " willCrash=" + finalWillCrash);
+              if (exitValue != 0 && finalWillCrash == false) {
+                // should fail test
+                throw new RuntimeException(
+                    "node " + id + " process had unexpected non-zero exit status=" + exitValue);
+              }
             },
             r,
@@ -656,7 +653,7 @@ public class TestNRTReplication extends LuceneTestCase {
     primary.crash();

     // At this point replica is "in the future": it has 10 docs committed, but the primary crashed
-    // before committing so it has 0 docs
+    // before committing, so it has 0 docs

     // Restart primary:
     primary = startNode(-1, 0, path1, -1, true);
@@ -735,10 +732,7 @@ public class TestNRTReplication extends LuceneTestCase {
   private void assertWriteLockHeld(Path path) throws Exception {
     try (FSDirectory dir = FSDirectory.open(path)) {
       expectThrows(
-          LockObtainFailedException.class,
-          () -> {
-            dir.obtainLock(IndexWriter.WRITE_LOCK_NAME);
-          });
+          LockObtainFailedException.class, () -> dir.obtainLock(IndexWriter.WRITE_LOCK_NAME));
     }
   }

@@ -948,8 +942,7 @@ public class TestNRTReplication extends LuceneTestCase {
     try (Connection c = new Connection(primary.tcpPort)) {
       c.out.writeByte(SimplePrimaryNode.CMD_SET_REPLICAS);
       c.out.writeVInt(replicas.length);
-      for (int id = 0; id < replicas.length; id++) {
-        NodeProcess replica = replicas[id];
+      for (NodeProcess replica : replicas) {
         c.out.writeVInt(replica.id);
         c.out.writeVInt(replica.tcpPort);
       }
@@ -998,12 +991,11 @@ public class TestNRTReplication extends LuceneTestCase {

   static void message(String message) {
     long now = System.nanoTime();
-    System.out.println(
-        String.format(
-            Locale.ROOT,
-            "%5.3fs : parent [%11s] %s",
-            (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-            Thread.currentThread().getName(),
-            message));
+    System.out.printf(
+        Locale.ROOT,
+        "%5.3fs : parent [%11s] %s%n",
+        (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+        Thread.currentThread().getName(),
+        message);
   }
 }

@@ -94,8 +94,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
 //   hazardous window
 // - replica comes up just as the primary is crashing / moving
 // - electing a new primary when a replica is just finishing its nrt sync: we need to wait for it
-//   so we are sure to get the "most up to
-//   date" replica
+//   so we are sure to get the "most up to date" replica
 // - replica comes up after merged segment finished so it doesn't copy over the merged segment
 //   "promptly" (i.e. only sees it on NRT refresh)

@@ -210,7 +209,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
     if (NUM_NODES == null) {
       numNodes = TestUtil.nextInt(random(), 2, 10);
     } else {
-      numNodes = NUM_NODES.intValue();
+      numNodes = NUM_NODES;
     }

     System.out.println("TEST: using " + numNodes + " nodes");
@@ -347,7 +346,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
     }
     restarter.join();

-    // Close replicas before primary so we cancel any in-progres replications:
+    // Close replicas before primary so we cancel any in-progress replications:
     System.out.println("TEST: top: now close replicas");
     List<Closeable> toClose = new ArrayList<>();
     for (NodeProcess node : nodes) {
@@ -484,7 +483,7 @@ public class TestStressNRTReplication extends LuceneTestCase {

     // When the primary starts, the userData in its latest commit point tells us which version it
     // had indexed up to, so we know where to
-    // replay from in the xlog. However, we forcefuly advance the version, and then IW on init (or
+    // replay from in the xlog. However, we forcefully advance the version, and then IW on init (or
     // maybe getReader) also adds 1 to it.
     // Since we publish the primary in this state (before xlog replay is done), a replica can start
     // up at this point and pull this version,
@@ -648,7 +647,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
     int tcpPort = -1;
     long initCommitVersion = -1;
     long initInfosVersion = -1;
-    Pattern logTimeStart = Pattern.compile("^[0-9\\.]+s .*");
+    Pattern logTimeStart = Pattern.compile("^[0-9.]+s .*");
     boolean willCrash = false;

     while (true) {
@@ -723,48 +722,43 @@ public class TestStressNRTReplication extends LuceneTestCase {
     // nodeClosed once it exits:
     Thread pumper =
         ThreadPumper.start(
-            new Runnable() {
-              @Override
-              public void run() {
-                message("now wait for process " + p);
-                try {
-                  p.waitFor();
-                } catch (Throwable t) {
-                  throw new RuntimeException(t);
-                }
-
-                message("done wait for process " + p);
-                int exitValue = p.exitValue();
-                message("exit value=" + exitValue + " willCrash=" + finalWillCrash);
-                if (childLog != null) {
-                  try {
-                    childLog.write("process done; exitValue=" + exitValue + "\n");
-                    childLog.close();
-                  } catch (IOException ioe) {
-                    throw new RuntimeException(ioe);
-                  }
-                }
-                if (exitValue != 0
-                    && finalWillCrash == false
-                    && crashingNodes.remove(id) == false) {
-                  // should fail test
-                  failed.set(true);
-                  if (childLog != null) {
-                    throw new RuntimeException(
-                        "node "
-                            + id
-                            + " process had unexpected non-zero exit status="
-                            + exitValue
-                            + "; see "
-                            + childLog
-                            + " for details");
-                  } else {
-                    throw new RuntimeException(
-                        "node " + id + " process had unexpected non-zero exit status=" + exitValue);
-                  }
-                }
-                nodeClosed(id);
-              }
+            () -> {
+              message("now wait for process " + p);
+              try {
+                p.waitFor();
+              } catch (Throwable t) {
+                throw new RuntimeException(t);
+              }
+
+              message("done wait for process " + p);
+              int exitValue = p.exitValue();
+              message("exit value=" + exitValue + " willCrash=" + finalWillCrash);
+              if (childLog != null) {
+                try {
+                  childLog.write("process done; exitValue=" + exitValue + "\n");
+                  childLog.close();
+                } catch (IOException ioe) {
+                  throw new RuntimeException(ioe);
+                }
+              }
+              if (exitValue != 0 && finalWillCrash == false && crashingNodes.remove(id) == false) {
+                // should fail test
+                failed.set(true);
+                if (childLog != null) {
+                  throw new RuntimeException(
+                      "node "
+                          + id
+                          + " process had unexpected non-zero exit status="
+                          + exitValue
+                          + "; see "
+                          + childLog
+                          + " for details");
+                } else {
+                  throw new RuntimeException(
+                      "node " + id + " process had unexpected non-zero exit status=" + exitValue);
+                }
+              }
+              nodeClosed(id);
             },
             r,
             System.out,
@@ -920,7 +914,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
     }
     b.append(String.format(Locale.ROOT, "%s%d(%.1fs)", prefix, i, sec));
     }
-    message("node status" + b.toString());
+    message("node status" + b);
     message("downNodes=" + downNodes);

     // If primary is down, promote a replica:
@@ -1084,23 +1078,18 @@ public class TestStressNRTReplication extends LuceneTestCase {
         } else {
           // Just ensure that all nodes show the same hit count for
           // the same version, i.e. they really are replicas of one another:
-          if (oldHitCount.intValue() != hitCount) {
+          if (oldHitCount != hitCount) {
             failed.set(true);
             stop.set(true);
             message(
                 "top: searcher: wrong version hitCount: version="
                     + version
                     + " oldHitCount="
-                    + oldHitCount.intValue()
+                    + oldHitCount
                     + " hitCount="
                     + hitCount);
             fail(
-                "version="
-                    + version
-                    + " oldHitCount="
-                    + oldHitCount.intValue()
-                    + " hitCount="
-                    + hitCount);
+                "version=" + version + " oldHitCount=" + oldHitCount + " hitCount=" + hitCount);
           }
         }
       } catch (
@@ -1333,24 +1322,22 @@ public class TestStressNRTReplication extends LuceneTestCase {

   static void message(String message) {
     long now = System.nanoTime();
-    System.out.println(
-        String.format(
-            Locale.ROOT,
-            "%5.3fs : parent [%11s] %s",
-            (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-            Thread.currentThread().getName(),
-            message));
+    System.out.printf(
+        Locale.ROOT,
+        "%5.3fs : parent [%11s] %s%n",
+        (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+        Thread.currentThread().getName(),
+        message);
   }

   static void message(String message, long localStartNS) {
     long now = System.nanoTime();
-    System.out.println(
-        String.format(
-            Locale.ROOT,
-            "%5.3fs %5.1fs: parent [%11s] %s",
-            (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-            (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
-            Thread.currentThread().getName(),
-            message));
+    System.out.printf(
+        Locale.ROOT,
+        "%5.3fs %5.1fs: parent [%11s] %s%n",
+        (now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+        (now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
+        Thread.currentThread().getName(),
+        message);
   }
 }

@@ -264,14 +264,12 @@ public class TestTermAutomatonQuery extends LuceneTestCase {

     TokenStream ts =
         new CannedTokenStream(
-            new Token[] {
-              token("fast", 1, 1),
-              token("speedy", 0, 1),
-              token("wi", 1, 1),
-              token("wifi", 0, 2),
-              token("fi", 1, 1),
-              token("network", 1, 1)
-            });
+            token("fast", 1, 1),
+            token("speedy", 0, 1),
+            token("wi", 1, 1),
+            token("wifi", 0, 2),
+            token("fi", 1, 1),
+            token("network", 1, 1));

     TermAutomatonQuery q = new TokenStreamToTermAutomatonQuery().toQuery("field", ts);
     // System.out.println("DOT: " + q.toDot());
@@ -322,11 +320,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     q.setAccept(s2, true);
     q.addAnyTransition(s0, s1);
     q.addTransition(s1, s2, "b");
-    expectThrows(
-        IllegalStateException.class,
-        () -> {
-          q.finish();
-        });
+    expectThrows(IllegalStateException.class, q::finish);
   }

   public void testInvalidTrailWithAny() throws Exception {
@@ -337,11 +331,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     q.setAccept(s2, true);
     q.addTransition(s0, s1, "b");
     q.addAnyTransition(s1, s2);
-    expectThrows(
-        IllegalStateException.class,
-        () -> {
-          q.finish();
-        });
+    expectThrows(IllegalStateException.class, q::finish);
   }

   public void testAnyFromTokenStream() throws Exception {
@@ -369,13 +359,11 @@ public class TestTermAutomatonQuery extends LuceneTestCase {

     TokenStream ts =
         new CannedTokenStream(
-            new Token[] {
-              token("comes", 1, 1),
-              token("comes", 0, 2),
-              token("*", 1, 1),
-              token("sun", 1, 1),
-              token("moon", 0, 1)
-            });
+            token("comes", 1, 1),
+            token("comes", 0, 2),
+            token("*", 1, 1),
+            token("sun", 1, 1),
+            token("moon", 0, 1));

     TermAutomatonQuery q = new TokenStreamToTermAutomatonQuery().toQuery("field", ts);
     // System.out.println("DOT: " + q.toDot());
@@ -443,9 +431,9 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
       public TokenStreamComponents createComponents(String fieldName) {
         MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, true, 100);
         tokenizer.setEnableChecks(true);
-        TokenFilter filt = new MockTokenFilter(tokenizer, MockTokenFilter.EMPTY_STOPSET);
-        filt = new RandomSynonymFilter(filt);
-        return new TokenStreamComponents(tokenizer, filt);
+        TokenFilter filter = new MockTokenFilter(tokenizer, MockTokenFilter.EMPTY_STOPSET);
+        filter = new RandomSynonymFilter(filter);
+        return new TokenStreamComponents(tokenizer, filter);
       }
     };

@@ -570,14 +558,12 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
       System.out.println("FAILED:");
       for (String id : hits1Docs) {
         if (hits2Docs.contains(id) == false) {
-          System.out.println(
-              String.format(Locale.ROOT, "  id=%3s matched but should not have", id));
+          System.out.printf(Locale.ROOT, "  id=%3s matched but should not have%n", id);
         }
       }
       for (String id : hits2Docs) {
         if (hits1Docs.contains(id) == false) {
-          System.out.println(
-              String.format(Locale.ROOT, "  id=%3s did not match but should have", id));
+          System.out.printf(Locale.ROOT, "  id=%3s did not match but should have%n", id);
         }
       }
       throw ae;
@@ -598,7 +584,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {

   private static class RandomQuery extends Query {
     private final long seed;
-    private float density;
+    private final float density;

     // density should be 0.0 ... 1.0
     public RandomQuery(long seed, float density) {
@@ -731,11 +717,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     IndexReader r = w.getReader();
     IndexSearcher s = newSearcher(r);

-    TokenStream ts =
-        new CannedTokenStream(
-            new Token[] {
-              token("a", 1, 1),
-            });
+    TokenStream ts = new CannedTokenStream(token("a", 1, 1));

     TermAutomatonQuery q = new TokenStreamToTermAutomatonQuery().toQuery("field", ts);
     // System.out.println("DOT: " + q.toDot());
@@ -756,11 +738,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     IndexReader r = w.getReader();
     IndexSearcher s = newSearcher(r);

-    TokenStream ts =
-        new CannedTokenStream(
-            new Token[] {
-              token("a", 1, 1), token("x", 1, 1),
-            });
+    TokenStream ts = new CannedTokenStream(token("a", 1, 1), token("x", 1, 1));

     TermAutomatonQuery q = new TokenStreamToTermAutomatonQuery().toQuery("field", ts);
     // System.out.println("DOT: " + q.toDot());

@@ -50,7 +50,6 @@ import org.junit.Ignore;
 /** Benchmarks tests for implementations of {@link Lookup} interface. */
 @Ignore("COMMENT ME TO RUN BENCHMARKS!")
 public class TestLookupBenchmark extends LuceneTestCase {
-  @SuppressWarnings({"unchecked", "deprecation"})
   private final List<Class<? extends Lookup>> benchmarkClasses =
       Arrays.asList(
           FuzzySuggester.class,
@@ -82,7 +81,7 @@ public class TestLookupBenchmark extends LuceneTestCase {
     assert false : "disable assertions before running benchmarks!";
     List<Input> input = readTop50KWiki();
     Collections.shuffle(input, random);
-    TestLookupBenchmark.dictionaryInput = input.toArray(new Input[input.size()]);
+    TestLookupBenchmark.dictionaryInput = input.toArray(new Input[0]);
     Collections.shuffle(input, random);
     TestLookupBenchmark.benchmarkInput = input;
   }
@@ -96,7 +95,7 @@ public class TestLookupBenchmark extends LuceneTestCase {
     URL resource =
         IOUtils.requireResourceNonNull(TestLookupBenchmark.class.getResource(name), name);

-    String line = null;
+    String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(resource.openStream(), UTF_8));
     while ((line = br.readLine()) != null) {
       int tab = line.indexOf('|');
@@ -115,21 +114,17 @@ public class TestLookupBenchmark extends LuceneTestCase {
     for (final Class<? extends Lookup> cls : benchmarkClasses) {
       BenchmarkResult result =
           measure(
-              new Callable<Integer>() {
-                @Override
-                public Integer call() throws Exception {
-                  final Lookup lookup = buildLookup(cls, dictionaryInput);
-                  return lookup.hashCode();
-                }
+              () -> {
+                final Lookup lookup = buildLookup(cls, dictionaryInput);
+                return lookup.hashCode();
               });

-      System.err.println(
-          String.format(
-              Locale.ROOT,
-              "%-15s input: %d, time[ms]: %s",
-              cls.getSimpleName(),
-              dictionaryInput.length,
-              result.average.toString()));
+      System.err.printf(
+          Locale.ROOT,
+          "%-15s input: %d, time[ms]: %s%n",
+          cls.getSimpleName(),
+          dictionaryInput.length,
+          result.average);
     }
   }

@@ -139,15 +134,14 @@ public class TestLookupBenchmark extends LuceneTestCase {
     for (Class<? extends Lookup> cls : benchmarkClasses) {
       Lookup lookup = buildLookup(cls, dictionaryInput);
       long sizeInBytes = lookup.ramBytesUsed();
-      System.err.println(
-          String.format(
-              Locale.ROOT, "%-15s size[B]:%,13d", lookup.getClass().getSimpleName(), sizeInBytes));
+      System.err.printf(
+          Locale.ROOT, "%-15s size[B]:%,13d%n", lookup.getClass().getSimpleName(), sizeInBytes);
     }
   }

   /** Create {@link Lookup} instance and populate it. */
   private Lookup buildLookup(Class<? extends Lookup> cls, Input[] input) throws Exception {
-    Lookup lookup = null;
+    Lookup lookup;
     if (cls == TSTLookup.class
         || cls == FSTCompletionLookup.class
         || cls == WFSTCompletionLookup.class) {
@@ -203,14 +197,13 @@ public class TestLookupBenchmark extends LuceneTestCase {
   public void runPerformanceTest(
       final int minPrefixLen, final int maxPrefixLen, final int num, final boolean onlyMorePopular)
       throws Exception {
-    System.err.println(
-        String.format(
-            Locale.ROOT,
-            "-- prefixes: %d-%d, num: %d, onlyMorePopular: %s",
-            minPrefixLen,
-            maxPrefixLen,
-            num,
-            onlyMorePopular));
+    System.err.printf(
+        Locale.ROOT,
+        "-- prefixes: %d-%d, num: %d, onlyMorePopular: %s%n",
+        minPrefixLen,
+        maxPrefixLen,
+        num,
+        onlyMorePopular);

     for (Class<? extends Lookup> cls : benchmarkClasses) {
       final Lookup lookup = buildLookup(cls, dictionaryInput);
@@ -228,25 +221,21 @@ public class TestLookupBenchmark extends LuceneTestCase {

       BenchmarkResult result =
           measure(
-              new Callable<Integer>() {
-                @Override
-                public Integer call() throws Exception {
-                  int v = 0;
-                  for (String term : input) {
-                    v += lookup.lookup(term, onlyMorePopular, num).size();
-                  }
-                  return v;
-                }
+              () -> {
+                int v = 0;
+                for (String term : input) {
+                  v += lookup.lookup(term, onlyMorePopular, num).size();
+                }
+                return v;
               });

-      System.err.println(
-          String.format(
-              Locale.ROOT,
-              "%-15s queries: %d, time[ms]: %s, ~kQPS: %.0f",
-              lookup.getClass().getSimpleName(),
-              input.size(),
-              result.average.toString(),
-              input.size() / result.average.avg));
+      System.err.printf(
+          Locale.ROOT,
+          "%-15s queries: %d, time[ms]: %s, ~kQPS: %.0f%n",
+          lookup.getClass().getSimpleName(),
+          input.size(),
+          result.average,
+          input.size() / result.average.avg);
     }
   }

@@ -258,7 +247,7 @@ public class TestLookupBenchmark extends LuceneTestCase {
     List<Double> times = new ArrayList<>();
     for (int i = 0; i < warmup + rounds; i++) {
       final long start = System.nanoTime();
-      guard = callable.call().intValue();
+      guard = callable.call();
       times.add((System.nanoTime() - start) / NANOS_PER_MS);
     }
     return new BenchmarkResult(times, warmup, rounds);

@@ -24,13 +24,12 @@ import org.junit.Test;
 public class TestJvmInfo extends RandomizedTest {
   @Test
   public void testEchoJvmInfo() {
-    System.out.println(
-        String.format(
-            Locale.ROOT,
-            "This test runs with Java %s (%s, %s %s).",
-            System.getProperty("java.version"),
-            System.getProperty("java.vendor"),
-            System.getProperty("java.vm.name"),
-            System.getProperty("java.vm.version")));
+    System.out.printf(
+        Locale.ROOT,
+        "This test runs with Java %s (%s, %s %s).%n",
+        System.getProperty("java.version"),
+        System.getProperty("java.vendor"),
+        System.getProperty("java.vm.name"),
+        System.getProperty("java.vm.version"));
   }
 }