Fix nanos to millis conversion for tests (#11856)

Marios Trivyzas 2022-10-29 10:05:17 +03:00 committed by GitHub
parent 26ec0dd44c
commit 3210a42f09
51 changed files with 213 additions and 113 deletions

View File

@ -160,6 +160,9 @@ Optimizations
Other
---------------------
* GITHUB#11856: Fix nanos to millis conversion for tests (Marios Trivyzas)
* LUCENE-10423: Remove usages of System.currentTimeMillis() from tests. (Marios Trivyzas)
* GITHUB#11811: Upgrade google java format to 1.15.0 (Dawid Weiss)
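
The fix applied across the files below is mechanical but easy to misread in diff form: hand-rolled divisions of System.nanoTime() deltas (several with wrong divisors, e.g. 100_000 where 1_000_000 was intended) are replaced with java.util.concurrent.TimeUnit conversions, so the source and target units are explicit. A minimal sketch of the before/after shape, using a hypothetical class and variable names that are not part of the commit:

    import java.util.concurrent.TimeUnit;

    class ElapsedTimeSketch {
      public static void main(String[] args) {
        long start = System.nanoTime();
        // ... the timed work would run here ...
        long elapsedNanos = System.nanoTime() - start;

        // Before: manual division; the divisor is easy to get wrong (100_000 vs 1_000_000).
        long manualMillis = elapsedNanos / 1_000_000;

        // After: TimeUnit names both units, no magic constant at the call site.
        long millis = TimeUnit.NANOSECONDS.toMillis(elapsedNanos);
        long seconds = TimeUnit.NANOSECONDS.toSeconds(elapsedNanos);

        // Fractional output still needs a division, but the divisor is derived from TimeUnit.
        double fractionalMillis = elapsedNanos / (double) TimeUnit.MILLISECONDS.toNanos(1);

        System.out.println(manualMillis + " " + millis + " " + seconds + " " + fractionalMillis);
      }
    }

Note that toMillis and toSeconds truncate toward zero; hunks that format fractional values (e.g. "%.2f msec") therefore keep an explicit double division, deriving the divisor from TimeUnit.MILLISECONDS.toNanos(1) or TimeUnit.SECONDS.toNanos(1).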

View File

@ -258,7 +258,7 @@ public class TestPerformance extends LuceneTestCase {
for (int i = 0; i < 7; i++) {
long start = System.nanoTime();
iteration.run(consumer);
times.add((System.nanoTime() - start) / 1_000_000);
times.add(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
}
double average = times.stream().mapToLong(Long::longValue).average().orElseThrow();
System.out.println(

View File

@ -20,6 +20,7 @@ import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.lucene.analysis.Analyzer;
@ -261,7 +262,7 @@ public class TestPatternReplaceCharFilter extends BaseTokenStreamTestCase {
if (matcher.find()) {
System.out.println(matcher.group());
}
System.out.println(i + " > " + (System.nanoTime() - t) / 1_000_000_000.0);
System.out.println(i + " > " + TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - t));
}
}

View File

@ -19,6 +19,7 @@ package org.apache.lucene.analysis.sinks;
import java.io.IOException;
import java.io.StringReader;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.FilteringTokenFilter;
@ -199,7 +200,8 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
}
long finish = System.nanoTime();
System.out.println(
"ModCount: " + modCounts[j] + " Two fields took " + (finish - start) / 100_000 + " ms");
("ModCount: " + modCounts[j])
+ (" Two fields took " + TimeUnit.NANOSECONDS.toMillis(finish - start) + " ms"));
int sinkPos = 0;
// simulate one field with one sink
start = System.nanoTime();
@ -219,7 +221,8 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
}
finish = System.nanoTime();
System.out.println(
"ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) / 100_000 + " ms");
("ModCount: " + modCounts[j])
+ (" Tee fields took " + TimeUnit.NANOSECONDS.toMillis(finish - start) + " ms"));
assertTrue(sinkPos + " does not equal: " + tfPos, sinkPos == tfPos);
}
System.out.println("- End Tokens: " + tokCount[k] + "-----");

View File

@ -26,6 +26,7 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
@ -711,7 +712,7 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
nonCompoundCount++;
if (nonCompoundCount % 1000000 == 0) {
System.out.println(String.format("%.2f msec [pos=%d, %d, %d]",
(System.nanoTime()-startTimeNS)/1000000.0,
(System.nanoTime() - startTimeNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
netOffset + offsetAtt.startOffset(),
nonCompoundCount,
compoundCount));
@ -764,7 +765,8 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
}
String[] sentences = line.split("、|。");
if (VERBOSE) {
System.out.println("Total time : " + (System.nanoTime() - totalStart) / 100_000);
System.out.println(
"Total time : " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - totalStart) + " ms");
System.out.println(
"Test for Bocchan with pre-splitting sentences (" + sentences.length + " sentences)");
}
@ -780,7 +782,8 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
}
}
if (VERBOSE) {
System.out.println("Total time : " + (System.nanoTime() - totalStart) / 100_000);
System.out.println(
"Total time : " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - totalStart) + " ms");
}
}

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.RandomAccessVectorValues;
import org.apache.lucene.index.VectorSimilarityFunction;
import org.apache.lucene.util.InfoStream;
@ -123,8 +124,8 @@ public final class Lucene90HnswGraphBuilder {
Locale.ROOT,
"built %d in %d/%d ms",
node,
((now - t) / 1_000_000),
((now - start) / 1_000_000)));
TimeUnit.NANOSECONDS.toMillis(now - t),
TimeUnit.NANOSECONDS.toMillis(now - start)));
t = now;
}
}

View File

@ -1021,8 +1021,8 @@ public class BKDWriter60 implements Closeable {
// If no exception, we should have cleaned everything up:
assert tempDir.getCreatedFiles().isEmpty();
// long t2 = System.nanoTime();
// System.out.println("write time: " + ((t2-t1)/1000000.0) + " msec");
// System.out.println("write time: " + ((System.nanoTime() - t1) / (double)
// TimeUnit.MILLISECONDS.toNanos(1)) + " ms");
success = true;
} finally {

View File

@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.RandomAccessVectorValues;
import org.apache.lucene.index.VectorEncoding;
import org.apache.lucene.index.VectorSimilarityFunction;
@ -176,8 +177,8 @@ public final class Lucene91HnswGraphBuilder {
Locale.ROOT,
"built %d in %d/%d ms",
node,
((now - t) / 1_000_000),
((now - start) / 1_000_000)));
TimeUnit.NANOSECONDS.toMillis(now - t),
TimeUnit.NANOSECONDS.toMillis(now - start)));
return now;
}

View File

@ -71,7 +71,7 @@ public class NearRealtimeReaderTask extends PerfTask {
long waitForMsec = (pauseMSec - (System.currentTimeMillis() - t));
if (waitForMsec > 0) {
Thread.sleep(waitForMsec);
// System.out.println("NRT wait: " + waitForMsec + " msec");
// System.out.println("NRT wait: " + waitForMsec + " ms");
}
t = System.currentTimeMillis();

View File

@ -24,6 +24,7 @@ import java.text.Collator;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
@ -126,8 +127,9 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
execBenchmark(algLines);
assertTrue(CountingSearchTestTask.numSearches > 0);
long elapsed =
(CountingSearchTestTask.prevLastNanos - CountingSearchTestTask.startNanos) / 1_000_000;
assertTrue("elapsed time was " + elapsed + " msec", elapsed <= 1500);
TimeUnit.NANOSECONDS.toMillis(
CountingSearchTestTask.prevLastNanos - CountingSearchTestTask.startNanos);
assertTrue("elapsed time was " + elapsed + " ms", elapsed <= 1500);
}
// disabled until we fix BG thread prio -- this test

View File

@ -16,6 +16,7 @@
*/
package org.apache.lucene.benchmark.byTask.tasks;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.benchmark.byTask.PerfRunData;
/** Test Search task which counts number of searches. */
@ -47,6 +48,6 @@ public class CountingSearchTestTask extends SearchTask {
}
public long getElapsedMillis() {
return lastNanos - startNanos / 1_000_000;
return TimeUnit.NANOSECONDS.toMillis(lastNanos - startNanos);
}
}
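
Besides the unit conversion, the removed getElapsedMillis body had an operator-precedence bug: division binds tighter than subtraction, so only startNanos was scaled and the method returned a value close to the raw lastNanos timestamp rather than elapsed milliseconds. A short illustration with made-up timestamps (TimeUnit imported as in the hunk above):

    long startNanos = 1_000_000_000L;                                         // t0
    long lastNanos  = 1_500_000_000L;                                         // t0 + 0.5 s
    long buggy      = lastNanos - startNanos / 1_000_000;                     // 1_499_999_000, not milliseconds
    long fixed      = TimeUnit.NANOSECONDS.toMillis(lastNanos - startNanos);  // 500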

View File

@ -131,7 +131,11 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase {
long endIndex = System.nanoTime();
System.out.println(
"Indexed " + docsIndexed + " docs in " + (endIndex - startIndex) / 1_000_000_000 + "s");
"Indexed "
+ docsIndexed
+ " docs in "
+ TimeUnit.NANOSECONDS.toSeconds(endIndex - startIndex)
+ "s");
indexWriter.close();
}
@ -161,7 +165,8 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase {
reader.close();
reader = DirectoryReader.open(train); // using the train index from now on
long endSplit = System.nanoTime();
System.out.println("Splitting done in " + (endSplit - startSplit) / 1_000_000_000 + "s");
System.out.println(
"Splitting done in " + TimeUnit.NANOSECONDS.toSeconds(endSplit - startSplit) + "s");
}
classifiers.add(
@ -371,7 +376,7 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase {
ar, classifier, CATEGORY_FIELD, BODY_FIELD, 60000 * 30);
}
final long endTime = System.nanoTime();
final int elapse = (int) (endTime - startTime) / 1_000_000_000;
final int elapse = (int) TimeUnit.NANOSECONDS.toSeconds(endTime - startTime);
return " * "
+ classifier

View File

@ -704,8 +704,8 @@ final class SimpleTextBKDWriter implements Closeable {
// If no exception, we should have cleaned everything up:
assert tempDir.getCreatedFiles().isEmpty();
// long t2 = System.nanoTime();
// System.out.println("write time: " + ((t2-t1)/1000000.0) + " msec");
// System.out.println("write time: " + ((System.nanoTime() - t1) / (double)
// TimeUnit.MILLISECONDS.toNanos(1)) + " ms");
success = true;
} finally {

View File

@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.IOContext;
@ -267,7 +268,7 @@ final class BufferedUpdatesStream implements Accountable {
packetCount,
totalDelCount,
bytesUsed.get(),
(System.nanoTime() - startNS) / 1000000.));
(System.nanoTime() - startNS) / (double) TimeUnit.MILLISECONDS.toNanos(1)));
}
}

View File

@ -4190,7 +4190,7 @@ public final class CheckIndex implements Closeable {
}
private static double nsToSec(long ns) {
return ns / 1000000000.0;
return ns / (double) TimeUnit.SECONDS.toNanos(1);
}
/**

View File

@ -21,6 +21,7 @@ import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.internal.tests.ConcurrentMergeSchedulerAccess;
import org.apache.lucene.internal.tests.TestSecrets;
@ -614,7 +615,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
}
if (verbose() && startStallTime != 0) {
message(" stalled for " + (System.currentTimeMillis() - startStallTime) + " msec");
message(" stalled for " + (System.currentTimeMillis() - startStallTime) + " ms");
}
return true;
@ -889,7 +890,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
protected void targetMBPerSecChanged() {}
private static double nsToSec(long ns) {
return ns / 1000000000.0;
return ns / (double) TimeUnit.SECONDS.toNanos(1);
}
private static double bytesToMB(long bytes) {

View File

@ -23,6 +23,7 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Accountable;
@ -311,7 +312,7 @@ final class DocumentsWriterFlushControl implements Accountable, Closeable {
String.format(
Locale.ROOT,
"done stalling flushes for %.1f msec: netBytes: %.1f MB flushBytes: %.1f MB fullFlush: %b",
(System.nanoTime() - stallStartNS) / 1000000.,
(System.nanoTime() - stallStartNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
netBytes() / 1024. / 1024.,
getFlushingBytes() / 1024. / 1024.,
fullFlush));

View File

@ -25,6 +25,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.codecs.Codec;
@ -473,7 +474,10 @@ final class DocumentsWriterPerThread implements Accountable {
sealFlushedSegment(fs, sortMap, flushNotifications);
if (infoStream.isEnabled("DWPT")) {
infoStream.message(
"DWPT", "flush time " + ((System.nanoTime() - t0) / 1000000.0) + " msec");
"DWPT",
"flush time "
+ ((System.nanoTime() - t0) / (double) TimeUnit.MILLISECONDS.toNanos(1))
+ " ms");
}
return fs;
} catch (Throwable t) {

View File

@ -22,6 +22,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.IntConsumer;
import org.apache.lucene.search.DocIdSetIterator;
@ -216,7 +217,7 @@ final class FrozenBufferedUpdates {
String.format(
Locale.ROOT,
"applyDocValuesUpdates %.1f msec for %d segments, %d field updates; %d new updates",
(System.nanoTime() - startNS) / 1000000.,
(System.nanoTime() - startNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
segStates.length,
fieldUpdatesCount,
updateCount));
@ -430,7 +431,7 @@ final class FrozenBufferedUpdates {
String.format(
Locale.ROOT,
"applyQueryDeletes took %.2f msec for %d segments and %d queries; %d new deletions",
(System.nanoTime() - startNS) / 1000000.,
(System.nanoTime() - startNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
segStates.length,
deleteQueries.length,
delCount));
@ -493,7 +494,7 @@ final class FrozenBufferedUpdates {
String.format(
Locale.ROOT,
"applyTermDeletes took %.2f msec for %d segments and %d del terms; %d new deletions",
(System.nanoTime() - startNS) / 1000000.,
(System.nanoTime() - startNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
segStates.length,
deleteTerms.size(),
delCount));

View File

@ -30,6 +30,7 @@ import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
@ -605,7 +606,7 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
long t1 = System.nanoTime();
infoStream.message("IFD", ((t1 - t0) / 1000000) + " msec to checkpoint");
infoStream.message("IFD", TimeUnit.NANOSECONDS.toMillis(t1 - t0) + " ms to checkpoint");
}
}

View File

@ -696,8 +696,7 @@ public class IndexWriter
maybeMerge(config.getMergePolicy(), MergeTrigger.FULL_FLUSH, UNBOUNDED_MAX_MERGE_SEGMENTS);
}
if (infoStream.isEnabled("IW")) {
infoStream.message(
"IW", "getReader took " + (System.currentTimeMillis() - tStart) + " msec");
infoStream.message("IW", "getReader took " + (System.currentTimeMillis() - tStart) + " ms");
}
success2 = true;
} catch (VirtualMachineError tragedy) {
@ -867,7 +866,7 @@ public class IndexWriter
count,
readerPool.ramBytesUsed() / 1024. / 1024.,
ramBufferSizeMB,
((System.nanoTime() - startNS) / 1000000000.)));
((System.nanoTime() - startNS) / (double) TimeUnit.SECONDS.toNanos(1))));
}
}
}
@ -4130,7 +4129,7 @@ public class IndexWriter
String.format(
Locale.ROOT,
"commit: took %.1f msec",
(System.nanoTime() - startCommitTime) / 1000000.0));
(System.nanoTime() - startCommitTime) / (double) TimeUnit.MILLISECONDS.toNanos(1)));
infoStream.message("IW", "commit: done");
}
}
@ -4718,7 +4717,7 @@ public class IndexWriter
"IW",
"merge time "
+ (System.currentTimeMillis() - t0)
+ " msec for "
+ " ms for "
+ merge.info.info.maxDoc()
+ " docs");
}
@ -5154,7 +5153,7 @@ public class IndexWriter
String.format(
Locale.ROOT,
"%.1f sec %s",
e.getValue() / 1000000000.,
e.getValue() / (double) TimeUnit.SECONDS.toNanos(1),
e.getKey().name().toLowerCase(Locale.ROOT)))
.collect(Collectors.joining(", "));
if (!pauseInfo.isEmpty()) {
@ -5162,7 +5161,7 @@ public class IndexWriter
}
long t1 = System.nanoTime();
double sec = (t1 - merge.mergeStartNS) / 1000000000.;
double sec = (t1 - merge.mergeStartNS) / (double) TimeUnit.SECONDS.toNanos(1);
double segmentMB = (merge.info.sizeInBytes() / 1024. / 1024.);
infoStream.message(
"IW",
@ -6154,7 +6153,7 @@ public class IndexWriter
this,
segStates.length,
delCount,
(System.nanoTime() - iterStartNS) / 1000000000.));
(System.nanoTime() - iterStartNS) / (double) TimeUnit.SECONDS.toNanos(1)));
}
if (updates.privateSegment != null) {
// No need to retry for a segment-private packet: the merge that folds in our private
@ -6211,7 +6210,7 @@ public class IndexWriter
this,
totalSegmentCount,
totalDelCount,
(System.nanoTime() - startNS) / 1000000000.);
(System.nanoTime() - startNS) / (double) TimeUnit.SECONDS.toNanos(1));
if (iter > 0) {
message += "; " + (iter + 1) + " iters due to concurrent merges";
}

View File

@ -25,6 +25,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
@ -241,7 +242,8 @@ final class IndexingChain implements Accountable {
long t0 = System.nanoTime();
writeNorms(state, sortMap);
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write norms");
infoStream.message(
"IW", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to write norms");
}
SegmentReadState readState =
new SegmentReadState(
@ -254,19 +256,22 @@ final class IndexingChain implements Accountable {
t0 = System.nanoTime();
writeDocValues(state, sortMap);
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write docValues");
infoStream.message(
"IW", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to write docValues");
}
t0 = System.nanoTime();
writePoints(state, sortMap);
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write points");
infoStream.message(
"IW", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to write points");
}
t0 = System.nanoTime();
vectorValuesConsumer.flush(state, sortMap);
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write vectors");
infoStream.message(
"IW", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to write vectors");
}
// it's possible all docs hit non-aborting exceptions...
@ -275,7 +280,8 @@ final class IndexingChain implements Accountable {
storedFieldsConsumer.flush(state, sortMap);
if (infoStream.isEnabled("IW")) {
infoStream.message(
"IW", ((System.nanoTime() - t0) / 1000000) + " msec to finish stored fields");
"IW",
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to finish stored fields");
}
t0 = System.nanoTime();
@ -304,7 +310,8 @@ final class IndexingChain implements Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message(
"IW",
((System.nanoTime() - t0) / 1000000) + " msec to write postings and finish vectors");
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms to write postings and finish vectors");
}
// Important to save after asking consumer to flush so
@ -317,7 +324,8 @@ final class IndexingChain implements Accountable {
.fieldInfosFormat()
.write(state.directory, state.segmentInfo, "", state.fieldInfos, IOContext.DEFAULT);
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", ((System.nanoTime() - t0) / 1000000) + " msec to write fieldInfos");
infoStream.message(
"IW", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms to write fieldInfos");
}
return sortMap;

View File

@ -21,6 +21,7 @@ import static org.apache.lucene.index.IndexWriter.isCongruentSort;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.KnnVectorsReader;
@ -215,7 +216,9 @@ public class MergeState {
infoStream.message(
"SM",
String.format(
Locale.ROOT, "%.2f msec to build merge sorted DocMaps", (t1 - t0) / 1000000.0));
Locale.ROOT,
"%.2f msec to build merge sorted DocMaps",
(t1 - t0) / (double) TimeUnit.MILLISECONDS.toNanos(1)));
}
return result;
}

View File

@ -27,6 +27,7 @@ import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
@ -701,7 +702,7 @@ final class ReadersAndUpdates {
Locale.ROOT,
"done write field updates for seg=%s; took %.3fs; new files: %s",
info,
(System.nanoTime() - startTimeNS) / 1000000000.0,
(System.nanoTime() - startTimeNS) / (double) TimeUnit.SECONDS.toNanos(1),
newDVFiles));
}
return true;

View File

@ -18,6 +18,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.codecs.FieldsConsumer;
@ -271,10 +272,14 @@ final class SegmentMerger {
}
int numMerged = merger.merge();
if (mergeState.infoStream.isEnabled("SM")) {
long t1 = System.nanoTime();
mergeState.infoStream.message(
"SM",
((t1 - t0) / 1000000) + " msec to merge " + formatName + " [" + numMerged + " docs]");
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms to merge "
+ formatName
+ " ["
+ numMerged
+ " docs]");
}
return numMerged;
}
@ -291,11 +296,16 @@ final class SegmentMerger {
t0 = System.nanoTime();
}
merger.merge(segmentWriteState, segmentReadState);
long t1 = System.nanoTime();
if (mergeState.infoStream.isEnabled("SM")) {
long t1 = System.nanoTime();
mergeState.infoStream.message(
"SM",
((t1 - t0) / 1000000) + " msec to merge " + formatName + " [" + numMerged + " docs]");
TimeUnit.NANOSECONDS.toMillis(t1 - t0)
+ " ms to merge "
+ formatName
+ " ["
+ numMerged
+ " docs]");
}
}
}

View File

@ -18,6 +18,7 @@ package org.apache.lucene.search;
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.lucene.index.IndexWriter;
@ -162,13 +163,13 @@ public class ControlledRealTimeReopenThread<T> extends Thread implements Closeab
reopenLock.unlock();
}
long startMS = System.nanoTime() / 1000000;
long startMS = TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
while (targetGen > searchingGen) {
if (maxMS < 0) {
wait();
} else {
long msLeft = (startMS + maxMS) - System.nanoTime() / 1000000;
long msLeft = (startMS + maxMS) - TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
if (msLeft <= 0) {
return false;
} else {

View File

@ -988,8 +988,8 @@ public class BKDWriter implements Closeable {
// If no exception, we should have cleaned everything up:
assert tempDir.getCreatedFiles().isEmpty();
// long t2 = System.nanoTime();
// System.out.println("write time: " + ((t2-t1)/1000000.0) + " msec");
// System.out.println("write time: " + ((System.nanoTime() - t1) / (double)
// TimeUnit.SECONDS.toNanos(1)) + " ms");
success = true;
} finally {

View File

@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import java.util.SplittableRandom;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.RandomAccessVectorValues;
import org.apache.lucene.index.VectorEncoding;
import org.apache.lucene.index.VectorSimilarityFunction;
@ -209,8 +210,8 @@ public final class HnswGraphBuilder<T> {
Locale.ROOT,
"built %d in %d/%d ms",
node,
((now - t) / 1_000_000),
((now - start) / 1_000_000)));
TimeUnit.NANOSECONDS.toMillis(now - t),
TimeUnit.NANOSECONDS.toMillis(now - start)));
return now;
}

View File

@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
@ -202,7 +203,12 @@ public class Test2BTerms extends LuceneTestCase {
final long t0 = System.nanoTime();
w.addDocument(doc);
System.out.println(
i + " of " + numDocs + " " + (System.nanoTime() - t0) / 1_000_000 + " msec");
i
+ " of "
+ numDocs
+ " "
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms");
}
savedTerms = ts.savedTerms;
@ -266,8 +272,7 @@ public class Test2BTerms extends LuceneTestCase {
System.out.println(" FAILED: count=" + count);
failed = true;
}
final long t1 = System.nanoTime();
System.out.println(" took " + (t1 - t0) / 1_000_000 + " millis");
System.out.println(" took " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms");
TermsEnum.SeekStatus result = termsEnum.seekCeil(term);
if (result != TermsEnum.SeekStatus.FOUND) {

View File

@ -297,7 +297,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
+ SECONDS
+ " seconds ("
+ (lastDeleteTime - modTime)
+ " msec) but did not get deleted ",
+ " ms) but did not get deleted ",
lastDeleteTime - modTime <= leeway);
} catch (
@SuppressWarnings("unused")

View File

@ -18,6 +18,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -118,7 +119,10 @@ public class TestTermdocPerf extends LuceneTestCase {
long end = System.nanoTime();
if (VERBOSE)
System.out.println(
"milliseconds for creation of " + ndocs + " docs = " + (end - start) / 1_000_000);
"milliseconds for creation of "
+ ndocs
+ " docs = "
+ TimeUnit.NANOSECONDS.toMillis(end - start));
IndexReader reader = DirectoryReader.open(dir);
@ -140,7 +144,10 @@ public class TestTermdocPerf extends LuceneTestCase {
end = System.nanoTime();
if (VERBOSE)
System.out.println(
"milliseconds for " + iter + " TermDocs iteration: " + (end - start) / 1_000_000);
"milliseconds for "
+ iter
+ " TermDocs iteration: "
+ TimeUnit.NANOSECONDS.toMillis(end - start));
return ret;
}

View File

@ -20,6 +20,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.MultiReader;
@ -110,7 +111,8 @@ public class TestShardSearching extends ShardSearchingTestBase {
if (VERBOSE) {
System.out.println(
"\nTEST: follow-on query age="
+ ((System.nanoTime() - prevSearchState.searchTimeNanos) / 1000000000.0));
+ ((System.nanoTime() - prevSearchState.searchTimeNanos)
/ (double) TimeUnit.SECONDS.toNanos(1)));
}
try {

View File

@ -17,6 +17,7 @@
package org.apache.lucene.store;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.RateLimiter.SimpleRateLimiter;
import org.apache.lucene.tests.util.LuceneTestCase;
@ -85,7 +86,9 @@ public final class TestRateLimiter extends LuceneTestCase {
thread.join();
}
long endNS = System.nanoTime();
double actualMBPerSec = (totBytes.get() / 1024 / 1024.) / ((endNS - startNS) / 1000000000.0);
double actualMBPerSec =
(totBytes.get() / 1024 / 1024.)
/ ((endNS - startNS) / (double) TimeUnit.SECONDS.toNanos(1));
// TODO: this may false trip .... could be we can only assert that it never exceeds the max, so
// slow jenkins doesn't trip:

View File

@ -18,6 +18,7 @@ package org.apache.lucene.util;
import java.util.Locale;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.BaseSortTestCase.Entry;
import org.apache.lucene.util.BaseSortTestCase.Strategy;
@ -112,8 +113,8 @@ public class SelectorBenchmark {
k -= clone.length;
}
}
long timeMs = (System.nanoTime() - startTimeNs) / 1000000;
System.out.printf(Locale.ROOT, "%5d", timeMs);
System.out.printf(
Locale.ROOT, "%5d", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs));
}
}

View File

@ -18,6 +18,7 @@ package org.apache.lucene.util;
import java.util.Locale;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.BaseSortTestCase.Entry;
import org.apache.lucene.util.BaseSortTestCase.Strategy;
@ -98,8 +99,8 @@ public class SorterBenchmark {
System.arraycopy(original, 0, clone, 0, original.length);
sorter.sort(0, clone.length);
}
long timeMs = (System.nanoTime() - startTimeNs) / 1000000;
System.out.printf(Locale.ROOT, "%5d", timeMs);
System.out.printf(
Locale.ROOT, "%5d", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeNs));
}
}

View File

@ -30,6 +30,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.DataInput;
@ -270,21 +271,24 @@ public class TestFSTDirectAddressing extends LuceneTestCase {
long startTimeMs = System.nanoTime();
FST<CharsRef> originalFst = new FST<>(in, in, CharSequenceOutputs.getSingleton());
long endTimeMs = System.nanoTime();
System.out.println("time = " + (endTimeMs - startTimeMs) / 100_000 + " ms");
System.out.println(
"time = " + TimeUnit.NANOSECONDS.toMillis(endTimeMs - startTimeMs) + " ms");
for (float oversizingFactor : List.of(0f, 0f, 0f, 1f, 1f, 1f)) {
System.out.println("\nFST construction (oversizingFactor=" + oversizingFactor + ")");
startTimeMs = System.nanoTime();
FST<CharsRef> fst = recompile(originalFst, oversizingFactor);
endTimeMs = System.nanoTime();
System.out.println("time = " + (endTimeMs - startTimeMs) / 100_000 + " ms");
System.out.println(
"time = " + TimeUnit.NANOSECONDS.toMillis(endTimeMs - startTimeMs) + " ms");
System.out.println("FST RAM = " + fst.ramBytesUsed() + " B");
System.out.println("FST enum");
startTimeMs = System.nanoTime();
walk(fst);
endTimeMs = System.nanoTime();
System.out.println("time = " + (endTimeMs - startTimeMs) / 100_000 + " ms");
System.out.println(
"time = " + TimeUnit.NANOSECONDS.toMillis(endTimeMs - startTimeMs) + " ms");
}
}
}

View File

@ -40,6 +40,7 @@ import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -543,7 +544,7 @@ public class TestFSTs extends LuceneTestCase {
System.out.printf(
Locale.ROOT,
"%6.2fs: %9d...",
((System.nanoTime() - tStart) / 1_000_000_000.0),
(System.nanoTime() - tStart) / (double) TimeUnit.SECONDS.toNanos(1),
ord);
}
if (ord >= limit) {
@ -552,12 +553,14 @@ public class TestFSTs extends LuceneTestCase {
}
long tMid = System.nanoTime();
System.out.println(((tMid - tStart) / 1_000_000_000.0) + " sec to add all terms");
System.out.println(
((tMid - tStart) / (double) TimeUnit.SECONDS.toNanos(1)) + " sec to add all terms");
assert fstCompiler.getTermCount() == ord;
FST<T> fst = fstCompiler.compile();
long tEnd = System.nanoTime();
System.out.println(((tEnd - tMid) / 1_000_000_000.0) + " sec to finish/pack");
System.out.println(
((tEnd - tMid) / (double) TimeUnit.SECONDS.toNanos(1)) + " sec to finish/pack");
if (fst == null) {
System.out.println("FST was fully pruned!");
System.exit(0);
@ -622,14 +625,17 @@ public class TestFSTs extends LuceneTestCase {
ord++;
if (ord % 500000 == 0) {
System.out.println(
((System.nanoTime() - tStart) / 1_000_000_000.0) + "s: " + ord + "...");
(System.nanoTime() - tStart) / (double) TimeUnit.SECONDS.toNanos(1)
+ "sec: "
+ ord
+ "...");
}
if (ord >= limit) {
break;
}
}
double totSec = ((System.nanoTime() - tStart) / 1_000_000_000.0);
double totSec = (System.nanoTime() - tStart) / (double) TimeUnit.SECONDS.toNanos(1);
System.out.println(
"Verify took "
+ totSec

View File

@ -36,6 +36,7 @@ import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.codecs.KnnVectorsFormat;
import org.apache.lucene.codecs.KnnVectorsReader;
import org.apache.lucene.codecs.lucene94.Lucene94Codec;
@ -401,8 +402,9 @@ public class KnnGraphTester {
}
}
}
totalCpuTime = (bean.getCurrentThreadCpuTime() - cpuTimeStartNs) / 1_000_000;
elapsed = (System.nanoTime() - start) / 1_000_000; // ns -> ms
totalCpuTime =
TimeUnit.NANOSECONDS.toMillis(bean.getCurrentThreadCpuTime() - cpuTimeStartNs);
elapsed = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); // ns -> ms
for (int i = 0; i < numIters; i++) {
totalVisited += results[i].totalHits.value;
for (ScoreDoc doc : results[i].scoreDocs) {
@ -728,9 +730,10 @@ public class KnnGraphTester {
}
long elapsed = System.nanoTime() - start;
if (quiet == false) {
System.out.println("Indexed " + numDocs + " documents in " + elapsed / 1_000_000_000 + "s");
System.out.println(
"Indexed " + numDocs + " documents in " + TimeUnit.NANOSECONDS.toSeconds(elapsed) + "s");
}
return (int) (elapsed / 1_000_000);
return (int) TimeUnit.NANOSECONDS.toMillis(elapsed);
}
private static void usage() {

View File

@ -174,7 +174,7 @@ public class IndexFiles implements AutoCloseable {
+ reader.numDocs()
+ " documents in "
+ (end.getTime() - start.getTime())
+ " milliseconds");
+ " ms");
if (reader.numDocs() > 100
&& vectorDictSize < 1_000_000
&& System.getProperty("smoketester") == null) {

View File

@ -23,6 +23,7 @@ import java.awt.*;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.*;
@ -94,7 +95,7 @@ public class LukeMain {
javax.swing.SwingUtilities.invokeLater(
() -> {
try {
long _start = System.nanoTime() / 1_000_000;
long _start = System.nanoTime();
guiThreadResult.put(createGUI());
// Show the initial dialog.
@ -107,7 +108,10 @@ public class LukeMain {
(factory) -> {});
long _end = System.nanoTime() / 1_000_000;
log.info("Elapsed time for initializing GUI: " + (_end - _start) + "msec");
log.info(
"Elapsed time for initializing GUI: "
+ TimeUnit.NANOSECONDS.toMillis(_end - _start)
+ " ms");
} catch (Exception e) {
throw new RuntimeException(e);
}

View File

@ -20,6 +20,7 @@ package org.apache.lucene.replicator.nrt;
import java.io.Closeable;
import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IOContext;
@ -134,7 +135,7 @@ public class CopyOneFile implements Closeable {
"file %s: done copying [%s, %.3fms]",
name,
Node.bytesToString(metaData.length),
(System.nanoTime() - copyStartNS) / 1000000.0));
(System.nanoTime() - copyStartNS) / (double) TimeUnit.MILLISECONDS.toNanos(1)));
}
return true;

View File

@ -25,6 +25,7 @@ import java.io.PrintStream;
import java.nio.file.NoSuchFileException;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
@ -121,8 +122,8 @@ public abstract class Node implements Closeable {
String.format(
Locale.ROOT,
"%5.3fs %5.1fs: [%11s] %s",
(now - globalStartNS) / 1000000000.,
(now - localStartNS) / 1000000000.,
(now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
(now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
Thread.currentThread().getName(),
message));
}
@ -135,8 +136,8 @@ public abstract class Node implements Closeable {
String.format(
Locale.ROOT,
"%5.3fs %5.1fs: N%d [%11s] %s",
(now - globalStartNS) / 1000000000.,
(now - localStartNS) / 1000000000.,
(now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
(now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
id,
Thread.currentThread().getName(),
message));
@ -150,8 +151,8 @@ public abstract class Node implements Closeable {
String.format(
Locale.ROOT,
"%5.3fs %5.1fs: %7s %2s [%11s] %s",
(now - globalStartNS) / 1000000000.,
(now - localStartNS) / 1000000000.,
(now - globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
(now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
state,
name(),
Thread.currentThread().getName(),

View File

@ -26,6 +26,7 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SegmentCommitInfo;
@ -60,7 +61,7 @@ class PreCopyMergedSegmentWarmer implements IndexReaderWarmer {
String.format(
Locale.ROOT,
"top: done warm merge " + info + ": took %.3f sec, %.1f MB",
(System.nanoTime() - startNS) / 1000000000.,
(System.nanoTime() - startNS) / (double) TimeUnit.SECONDS.toNanos(1),
info.sizeInBytes() / 1024 / 1024.));
primary.finishedMergedFiles.addAll(filesMetaData.keySet());
}

View File

@ -33,6 +33,7 @@ import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexWriter;
@ -298,7 +299,7 @@ public abstract class ReplicaNode extends Node {
Locale.ROOT,
"top: %d: start: done sync: took %.3fs for %s, opened NRT reader version=%d",
id,
(System.nanoTime() - initSyncStartNS) / 1000000000.0,
(System.nanoTime() - initSyncStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
bytesToString(job.getTotalBytesCopied()),
job.getCopyState().version));
@ -491,7 +492,7 @@ public abstract class ReplicaNode extends Node {
String.format(
Locale.ROOT,
"top: done sync: took %.3fs for %s, opened NRT reader version=%d markerCount=%d",
(System.nanoTime() - startNS) / 1000000000.0,
(System.nanoTime() - startNS) / (double) TimeUnit.SECONDS.toNanos(1),
bytesToString(job.getTotalBytesCopied()),
copyState.version,
markerCount));

View File

@ -23,6 +23,7 @@ import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.IOUtils;
/**
@ -151,7 +152,7 @@ class SimpleCopyJob extends CopyJob {
String.format(
Locale.ROOT,
"top: file copy done; took %.1f msec to copy %d bytes; now rename %d tmp files",
(System.nanoTime() - startNS) / 1000000.0,
(System.nanoTime() - startNS) / (double) TimeUnit.MILLISECONDS.toNanos(1),
totBytesCopied,
copiedFiles.size()));

View File

@ -34,6 +34,7 @@ import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -245,7 +246,7 @@ class SimplePrimaryNode extends PrimaryNode {
+ " to "
+ preCopy.connections.size()
+ " replicas for %.1f sec...",
(ns - startNS) / 1000000000.0));
(ns - startNS) / (double) TimeUnit.SECONDS.toNanos(1)));
lastWarnNS = ns;
}

View File

@ -26,6 +26,7 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
@ -1000,7 +1001,7 @@ public class TestNRTReplication extends LuceneTestCase {
String.format(
Locale.ROOT,
"%5.3fs : parent [%11s] %s",
(now - Node.globalStartNS) / 1000000000.,
(now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
Thread.currentThread().getName(),
message));
}

View File

@ -39,6 +39,7 @@ import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@ -526,8 +527,8 @@ public class TestStressNRTReplication extends LuceneTestCase {
long t1 = System.nanoTime();
message(
"top: done translog replay; took "
+ ((t1 - t0) / 1000000.0)
+ " msec; now publish primary");
+ ((t1 - t0) / (double) TimeUnit.MILLISECONDS.toNanos(1))
+ " ms; now publish primary");
// Publish new primary only after translog has succeeded in replaying; this is important, for
// this test anyway, so we keep a "linear"
@ -901,7 +902,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
long nowNS = System.nanoTime();
for (int i = 0; i < nodes.length; i++) {
b.append(' ');
double sec = (nowNS - nodeTimeStamps[i]) / 1000000000.0;
double sec = (nowNS - nodeTimeStamps[i]) / (double) TimeUnit.SECONDS.toNanos(1);
String prefix;
if (nodes[i] == null) {
downNodes.add(i);
@ -1293,7 +1294,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
if (random().nextInt(100) == 17) {
int pauseMS = TestUtil.nextInt(random(), 500, 2000);
System.out.println("Indexer: now pause for " + pauseMS + " msec...");
System.out.println("Indexer: now pause for " + pauseMS + " ms...");
Thread.sleep(pauseMS);
System.out.println("Indexer: done pause for a bit...");
}
@ -1335,7 +1336,7 @@ public class TestStressNRTReplication extends LuceneTestCase {
String.format(
Locale.ROOT,
"%5.3fs : parent [%11s] %s",
(now - Node.globalStartNS) / 1000000000.,
(now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
Thread.currentThread().getName(),
message));
}
@ -1346,8 +1347,8 @@ public class TestStressNRTReplication extends LuceneTestCase {
String.format(
Locale.ROOT,
"%5.3fs %5.1fs: parent [%11s] %s",
(now - Node.globalStartNS) / 1000000000.,
(now - localStartNS) / 1000000000.,
(now - Node.globalStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
(now - localStartNS) / (double) TimeUnit.SECONDS.toNanos(1),
Thread.currentThread().getName(),
message));
}

View File

@ -751,7 +751,7 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
mgr.release(searcher);
}
// System.out.println((System.currentTimeMillis() - t0) + " msec for infix suggest");
// System.out.println((System.currentTimeMillis() - t0) + " ms for infix suggest");
// System.out.println(results);
return results;

View File

@ -583,7 +583,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
"TEST: DONE start "
+ NUM_INDEX_THREADS
+ " indexing threads ["
+ (System.nanoTime() - t0) / 1_000_000
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms]");
}
@ -594,7 +594,9 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
if (VERBOSE) {
System.out.println(
"TEST: all searching done [" + (System.nanoTime() - t0) / 1_000_000 + " ms]");
"TEST: all searching done ["
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms]");
}
for (Thread thread : indexThreads) {
@ -604,7 +606,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
if (VERBOSE) {
System.out.println(
"TEST: done join indexing threads ["
+ (System.nanoTime() - t0) / 1_000_000
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)
+ " ms]; addCount="
+ addCount
+ " delCount="
@ -765,7 +767,8 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
dir.close();
if (VERBOSE) {
System.out.println("TEST: done [" + (System.nanoTime() - t0) / 1_000_000 + " ms]");
System.out.println(
"TEST: done [" + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0) + " ms]");
}
}

View File

@ -17,6 +17,7 @@
package org.apache.lucene.tests.util;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ThreadInterruptedException;
@ -117,7 +118,7 @@ public class ThrottledIndexOutput extends IndexOutput {
long actualBps = (timeElapsed / pendingBytes) * 1000000000l; // nano to sec
if (actualBps > bytesPerSecond) {
long expected = (pendingBytes * 1000l / bytesPerSecond);
final long delay = expected - (timeElapsed / 1000000l);
final long delay = expected - TimeUnit.NANOSECONDS.toMillis(timeElapsed);
pendingBytes = 0;
timeElapsed = 0;
return delay;