mirror of https://github.com/apache/lucene.git
SOLR-12091: Rename TimeSource.getTime to getTimeNs.
This commit is contained in:
parent f2a45790f6
commit 0dfe19880c
@@ -77,6 +77,8 @@ Other Changes
 * SOLR-12090: Move DistribStateManager, NodeStateProvider and SolrCloudManager interfaces out of the
   autoscaling package. (shalin)

+* SOLR-12091: Rename TimeSource.getTime to getTimeNs. (ab)
+
 ================== 7.3.0 ==================

 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

@@ -60,14 +60,14 @@ public class ActionThrottle {
   }

   public void markAttemptingAction() {
-    lastActionStartedAt = timeSource.getTime();
+    lastActionStartedAt = timeSource.getTimeNs();
   }

   public void minimumWaitBetweenActions() {
     if (lastActionStartedAt == null) {
       return;
     }
-    long diff = timeSource.getTime() - lastActionStartedAt;
+    long diff = timeSource.getTimeNs() - lastActionStartedAt;
     int diffMs = (int) TimeUnit.MILLISECONDS.convert(diff, TimeUnit.NANOSECONDS);
     long minNsBetweenActions = TimeUnit.NANOSECONDS.convert(minMsBetweenActions, TimeUnit.MILLISECONDS);
     log.debug("The last {} attempt started {}ms ago.", name, diffMs);

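Note (not part of this commit): a minimal usage sketch of the ActionThrottle API touched above, mirroring the calls that appear in ActionThrottleTest further down. The import package paths and the class wrapper are assumptions for illustration only.

import org.apache.solr.cloud.ActionThrottle;   // package assumed from ActionThrottleTest below

public class ThrottleSketch {
  public static void main(String[] args) {
    // At most one attempt per 1000 ms; internally measured via TimeSource.getTimeNs() after this rename.
    ActionThrottle throttle = new ActionThrottle("example", 1000);

    throttle.minimumWaitBetweenActions(); // nothing recorded yet, so this returns immediately
    throttle.markAttemptingAction();      // records the current getTimeNs() reading
    // ... perform the first action ...

    throttle.minimumWaitBetweenActions(); // waits until ~1000 ms have elapsed since the mark
    throttle.markAttemptingAction();
    // ... perform the second action ...
  }
}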
@@ -369,7 +369,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     if (timeout != null) {
       try {
         int timeoutSeconds = parseHumanTime(timeout);
-        resumeTime = new Date(TimeUnit.MILLISECONDS.convert(timeSource.getTime(), TimeUnit.NANOSECONDS)
+        resumeTime = new Date(TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS)
             + TimeUnit.MILLISECONDS.convert(timeoutSeconds, TimeUnit.SECONDS));
       } catch (IllegalArgumentException e) {
         op.addError("Invalid 'timeout' value for suspend trigger: " + triggerName);

@@ -80,7 +80,7 @@ public class InactiveShardPlanAction extends TriggerActionBase {
           }
           long timestamp = Long.parseLong(tstampStr);
           // this timestamp uses epoch time
-          long currentTime = cloudManager.getTimeSource().getEpochTime();
+          long currentTime = cloudManager.getTimeSource().getEpochTimeNs();
           long delta = TimeUnit.NANOSECONDS.toSeconds(currentTime - timestamp);
           log.debug("{}/{}: tstamp={}, time={}, delta={}", coll.getName(), s.getName(), timestamp, currentTime, delta);
           if (delta > cleanupTTL) {

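Note (not part of this commit): a hedged sketch of the epoch-nanosecond TTL arithmetic used above. Both values are epoch-based nanoseconds, so the difference can be converted to seconds and compared against a TTL. The standalone TimeSource and the variable values here are illustrative; the action itself reads cloudManager.getTimeSource(). Import path assumed.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.TimeSource; // package assumed

public class InactiveTtlSketch {
  public static void main(String[] args) {
    long cleanupTTL = 3600; // seconds; illustrative value
    // A timestamp that was stored earlier via getEpochTimeNs(), here simulated as two hours old.
    long timestampNs = TimeSource.NANO_TIME.getEpochTimeNs() - TimeUnit.HOURS.toNanos(2);

    long currentTimeNs = TimeSource.NANO_TIME.getEpochTimeNs();
    long deltaSec = TimeUnit.NANOSECONDS.toSeconds(currentTimeNs - timestampNs);
    System.out.println("expired=" + (deltaSec > cleanupTTL)); // true for the 2h-old timestamp
  }
}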
@@ -144,7 +144,7 @@ public class MetricTrigger extends TriggerBase {
       values.forEach((tag, rate) -> rates.computeIfAbsent(node, s -> (Number) rate));
     }

-    long now = cloudManager.getTimeSource().getTime();
+    long now = cloudManager.getTimeSource().getTimeNs();
     // check for exceeded rates and filter out those with less than waitFor from previous events
     Map<String, Number> hotNodes = rates.entrySet().stream()
         .filter(entry -> waitForElapsed(entry.getKey(), now, lastNodeEvent))

@@ -67,7 +67,7 @@ public class NodeAddedTrigger extends TriggerBase {
         // don't add nodes that have since gone away
         if (lastLiveNodes.contains(n)) {
           log.debug("Adding node from marker path: {}", n);
-          nodeNameVsTimeAdded.put(n, cloudManager.getTimeSource().getTime());
+          nodeNameVsTimeAdded.put(n, cloudManager.getTimeSource().getTimeNs());
         }
         removeMarker(n);
       });
@@ -138,7 +138,7 @@ public class NodeAddedTrigger extends TriggerBase {
       Set<String> copyOfNew = new HashSet<>(newLiveNodes);
       copyOfNew.removeAll(lastLiveNodes);
       copyOfNew.forEach(n -> {
-        long eventTime = cloudManager.getTimeSource().getTime();
+        long eventTime = cloudManager.getTimeSource().getTimeNs();
         log.debug("Tracking new node: {} at time {}", n, eventTime);
         nodeNameVsTimeAdded.put(n, eventTime);
       });
@@ -150,7 +150,7 @@ public class NodeAddedTrigger extends TriggerBase {
         Map.Entry<String, Long> entry = it.next();
         String nodeName = entry.getKey();
         Long timeAdded = entry.getValue();
-        long now = cloudManager.getTimeSource().getTime();
+        long now = cloudManager.getTimeSource().getTimeNs();
         if (TimeUnit.SECONDS.convert(now - timeAdded, TimeUnit.NANOSECONDS) >= getWaitForSecond()) {
           nodeNames.add(nodeName);
           times.add(timeAdded);
@@ -160,7 +160,7 @@ public class NodeAddedTrigger extends TriggerBase {
       if (!nodeNames.isEmpty()) {
         if (processor != null) {
           log.debug("NodeAddedTrigger {} firing registered processor for nodes: {} added at times {}, now={}", name,
-              nodeNames, times, cloudManager.getTimeSource().getTime());
+              nodeNames, times, cloudManager.getTimeSource().getTimeNs());
           if (processor.process(new NodeAddedEvent(getEventType(), getName(), times, nodeNames))) {
             // remove from tracking set only if the fire was accepted
             nodeNames.forEach(n -> {

@@ -65,7 +65,7 @@ public class NodeLostTrigger extends TriggerBase {
         // don't add nodes that have since came back
         if (!lastLiveNodes.contains(n)) {
           log.debug("Adding lost node from marker path: {}", n);
-          nodeNameVsTimeRemoved.put(n, cloudManager.getTimeSource().getTime());
+          nodeNameVsTimeRemoved.put(n, cloudManager.getTimeSource().getTimeNs());
         }
         removeMarker(n);
       });
@@ -135,7 +135,7 @@ public class NodeLostTrigger extends TriggerBase {
       copyOfLastLiveNodes.removeAll(newLiveNodes);
       copyOfLastLiveNodes.forEach(n -> {
         log.debug("Tracking lost node: {}", n);
-        nodeNameVsTimeRemoved.put(n, cloudManager.getTimeSource().getTime());
+        nodeNameVsTimeRemoved.put(n, cloudManager.getTimeSource().getTimeNs());
       });

       // has enough time expired to trigger events for a node?
@@ -145,7 +145,7 @@ public class NodeLostTrigger extends TriggerBase {
         Map.Entry<String, Long> entry = it.next();
         String nodeName = entry.getKey();
         Long timeRemoved = entry.getValue();
-        long now = cloudManager.getTimeSource().getTime();
+        long now = cloudManager.getTimeSource().getTimeNs();
         if (TimeUnit.SECONDS.convert(now - timeRemoved, TimeUnit.NANOSECONDS) >= getWaitForSecond()) {
           nodeNames.add(nodeName);
           times.add(timeRemoved);

@@ -77,7 +77,7 @@ public class ScheduledTrigger extends TriggerBase {

     // attempt parsing to validate date math strings
     // explicitly set NOW because it may be different for simulated time
-    Date now = new Date(TimeUnit.NANOSECONDS.toMillis(cloudManager.getTimeSource().getEpochTime()));
+    Date now = new Date(TimeUnit.NANOSECONDS.toMillis(cloudManager.getTimeSource().getEpochTimeNs()));
     Instant startTime = parseStartTime(now, startTimeStr, timeZoneStr);
     DateMathParser.parseMath(now, startTime + everyStr, timeZone);
     DateMathParser.parseMath(now, startTime + graceDurationStr, timeZone);
@@ -162,7 +162,7 @@ public class ScheduledTrigger extends TriggerBase {
     }

     Instant now = Instant.ofEpochMilli(
-        TimeUnit.NANOSECONDS.toMillis(timeSource.getEpochTime()));
+        TimeUnit.NANOSECONDS.toMillis(timeSource.getEpochTimeNs()));
     AutoScaling.TriggerEventProcessor processor = processorRef.get();

     if (now.isBefore(nextRunTime)) {
@@ -176,7 +176,7 @@ public class ScheduledTrigger extends TriggerBase {
       }
       // Even though we are skipping the event, we need to notify any listeners of the IGNORED stage
       // so we create a dummy event with the ignored=true flag and ScheduledTriggers will do the rest
-      if (processor != null && processor.process(new ScheduledEvent(getEventType(), getName(), timeSource.getTime(),
+      if (processor != null && processor.process(new ScheduledEvent(getEventType(), getName(), timeSource.getTimeNs(),
           preferredOp, now.toEpochMilli(), true))) {
         lastRunAt = nextRunTime;
         return;
@@ -188,7 +188,7 @@ public class ScheduledTrigger extends TriggerBase {
       log.debug("ScheduledTrigger {} firing registered processor for scheduled time {}, now={}", name,
           nextRunTime, now);
     }
-    if (processor.process(new ScheduledEvent(getEventType(), getName(), timeSource.getTime(),
+    if (processor.process(new ScheduledEvent(getEventType(), getName(), timeSource.getTimeNs(),
         preferredOp, now.toEpochMilli()))) {
       lastRunAt = nextRunTime; // set to nextRunTime instead of now to avoid drift
     }

@@ -135,7 +135,7 @@ public class ScheduledTriggers implements Closeable {
     queueStats = new Stats();
     listeners = new TriggerListeners();
     // initialize cooldown timer
-    cooldownStart.set(cloudManager.getTimeSource().getTime() - cooldownPeriod.get());
+    cooldownStart.set(cloudManager.getTimeSource().getTimeNs() - cooldownPeriod.get());
   }

   /**
@@ -185,7 +185,7 @@ public class ScheduledTriggers implements Closeable {

     this.autoScalingConfig = autoScalingConfig;
     // reset cooldown
-    cooldownStart.set(cloudManager.getTimeSource().getTime() - cooldownPeriod.get());
+    cooldownStart.set(cloudManager.getTimeSource().getTimeNs() - cooldownPeriod.get());
   }

   /**
@@ -259,7 +259,7 @@ public class ScheduledTriggers implements Closeable {
       }
       // even though we pause all triggers during action execution there is a possibility that a trigger was already
       // running at the time and would have already created an event so we reject such events during cooldown period
-      if (cooldownStart.get() + cooldownPeriod.get() > cloudManager.getTimeSource().getTime()) {
+      if (cooldownStart.get() + cooldownPeriod.get() > cloudManager.getTimeSource().getTimeNs()) {
        log.debug("-------- Cooldown period - rejecting event: " + event);
        event.getProperties().put(TriggerEvent.COOLDOWN, true);
        triggerListeners.fireListeners(event.getSource(), event, TriggerEventProcessorStage.IGNORED, "In cooldown period.");
@@ -290,7 +290,7 @@ public class ScheduledTriggers implements Closeable {
       }
       actionExecutor.submit(() -> {
         assert hasPendingActions.get();
-        long eventProcessingStart = cloudManager.getTimeSource().getTime();
+        long eventProcessingStart = cloudManager.getTimeSource().getTimeNs();
         TriggerListeners triggerListeners1 = triggerListeners.copy();
         log.debug("-- processing actions for " + event);
         try {
@@ -321,13 +321,13 @@ public class ScheduledTriggers implements Closeable {
         } catch (Exception e) {
           log.warn("Exception executing actions", e);
         } finally {
-          cooldownStart.set(cloudManager.getTimeSource().getTime());
+          cooldownStart.set(cloudManager.getTimeSource().getTimeNs());
           hasPendingActions.set(false);
           // resume triggers after cool down period
           resumeTriggers(cloudManager.getTimeSource().convertDelay(TimeUnit.NANOSECONDS, cooldownPeriod.get(), TimeUnit.MILLISECONDS));
         }
         log.debug("-- processing took {} ms for event id={}",
-            TimeUnit.NANOSECONDS.toMillis(cloudManager.getTimeSource().getTime() - eventProcessingStart), event.id);
+            TimeUnit.NANOSECONDS.toMillis(cloudManager.getTimeSource().getTimeNs() - eventProcessingStart), event.id);
       });
     } else {
       if (enqueued) {

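Note (not part of this commit): a hedged sketch of the nanosecond cooldown window shown above. In ScheduledTriggers the start and period live in atomics; plain longs are used here for illustration, and the period value is arbitrary. Import path assumed.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.TimeSource; // package assumed

public class CooldownSketch {
  public static void main(String[] args) {
    TimeSource timeSource = TimeSource.NANO_TIME;
    long cooldownPeriodNs = TimeUnit.SECONDS.toNanos(5); // illustrative period
    // Initializing to "now - period" makes the very first event fall outside the cooldown window.
    long cooldownStartNs = timeSource.getTimeNs() - cooldownPeriodNs;

    // Later, when an event arrives:
    boolean inCooldown = cooldownStartNs + cooldownPeriodNs > timeSource.getTimeNs();
    System.out.println("inCooldown=" + inCooldown); // false right after initialization

    // After the actions finish, the window restarts from the current reading:
    cooldownStartNs = timeSource.getTimeNs();
  }
}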
@@ -189,7 +189,7 @@ public class SearchRateTrigger extends TriggerBase {
       });
     }

-    long now = cloudManager.getTimeSource().getTime();
+    long now = cloudManager.getTimeSource().getTimeNs();
     // check for exceeded rates and filter out those with less than waitFor from previous events
     Map<String, Double> hotNodes = nodeRates.entrySet().stream()
         .filter(entry -> node.equals(Policy.ANY) || node.equals(entry.getKey()))

@@ -52,7 +52,7 @@ public class TriggerEventQueue {
   }

   public boolean offerEvent(TriggerEvent event) {
-    event.getProperties().put(ENQUEUE_TIME, timeSource.getTime());
+    event.getProperties().put(ENQUEUE_TIME, timeSource.getTimeNs());
     try {
       byte[] data = Utils.toJSON(event);
       delegate.offer(data);
@@ -114,7 +114,7 @@ public class TriggerEventQueue {
       TriggerEventType eventType = TriggerEventType.valueOf((String)map.get("eventType"));
       Map<String, Object> properties = (Map<String, Object>)map.get("properties");
       TriggerEvent res = new TriggerEvent(id, eventType, source, eventTime, properties);
-      res.getProperties().put(DEQUEUE_TIME, timeSource.getTime());
+      res.getProperties().put(DEQUEUE_TIME, timeSource.getTimeNs());
       return res;
     }
   }

@@ -179,7 +179,7 @@ public class SliceMutator {
         }
         props.put(ZkStateReader.STATE_PROP, message.getStr(key));
         // we need to use epoch time so that it's comparable across Overseer restarts
-        props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(dataProvider.getTimeSource().getEpochTime()));
+        props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(dataProvider.getTimeSource().getEpochTimeNs()));
         Slice newSlice = new Slice(slice.getName(), slice.getReplicasCopy(), props);
         slicesCopy.put(slice.getName(), newSlice);
       }

@@ -39,7 +39,7 @@ public class IdUtils {
    * uses {@link TimeSource#CURRENT_TIME} for timestamp values.
    */
   public static final String timeRandomId() {
-    return timeRandomId(TimeUnit.MILLISECONDS.convert(TimeSource.CURRENT_TIME.getTime(), TimeUnit.NANOSECONDS));
+    return timeRandomId(TimeUnit.MILLISECONDS.convert(TimeSource.CURRENT_TIME.getTimeNs(), TimeUnit.NANOSECONDS));
   }

   /**

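Note (not part of this commit): a hedged sketch of the millisecond conversion that timeRandomId() performs above. It calls the overload that takes an explicit timestamp, which is visible in the hunk; the overload's visibility and the import packages are assumptions here.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.IdUtils;    // package assumed
import org.apache.solr.common.util.TimeSource; // package assumed

public class IdSketch {
  public static void main(String[] args) {
    // CURRENT_TIME.getTimeNs() is currentTimeMillis() expressed in nanoseconds, so converting
    // back to milliseconds yields a conventional epoch-millis prefix for the generated id.
    long epochMs = TimeUnit.MILLISECONDS.convert(TimeSource.CURRENT_TIME.getTimeNs(), TimeUnit.NANOSECONDS);
    System.out.println(IdUtils.timeRandomId(epochMs));
  }
}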
@@ -29,12 +29,12 @@ public class TimeOut {

   public TimeOut(long interval, TimeUnit unit, TimeSource timeSource) {
     this.timeSource = timeSource;
-    startTime = timeSource.getTime();
+    startTime = timeSource.getTimeNs();
     this.timeoutAt = startTime + NANOSECONDS.convert(interval, unit);
   }

   public boolean hasTimedOut() {
-    return timeSource.getTime() > timeoutAt;
+    return timeSource.getTimeNs() > timeoutAt;
   }

   public void sleep(long ms) throws InterruptedException {
@@ -42,10 +42,10 @@ public class TimeOut {
   }

   public long timeLeft(TimeUnit unit) {
-    return unit.convert(timeoutAt - timeSource.getTime(), NANOSECONDS);
+    return unit.convert(timeoutAt - timeSource.getTimeNs(), NANOSECONDS);
   }

   public long timeElapsed(TimeUnit unit) {
-    return unit.convert(timeSource.getTime() - startTime, NANOSECONDS);
+    return unit.convert(timeSource.getTimeNs() - startTime, NANOSECONDS);
   }
 }

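Note (not part of this commit): a hedged usage sketch of the TimeOut helper shown above, using only the constructor and methods visible in the hunk. The workIsDone() placeholder and the import packages are assumptions.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.TimeSource; // package assumed
import org.apache.solr.util.TimeOut;           // package assumed

public class TimeOutSketch {
  public static void main(String[] args) throws InterruptedException {
    TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
    while (!timeOut.hasTimedOut()) {
      if (workIsDone()) {   // placeholder condition, not a Solr API
        break;
      }
      timeOut.sleep(250);   // sleeps via the TimeSource, so simulated sources also work
    }
    System.out.println("waited " + timeOut.timeElapsed(TimeUnit.MILLISECONDS) + " ms, "
        + timeOut.timeLeft(TimeUnit.MILLISECONDS) + " ms left");
  }

  private static boolean workIsDone() {
    return true; // placeholder so the sketch terminates immediately
  }
}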
@@ -36,13 +36,13 @@ public class ActionThrottleTest extends SolrTestCaseJ4 {
     }

     @Override
-    public long getTime() {
+    public long getTimeNs() {
       return returnValues.get(index++);
     }

     @Override
-    public long getEpochTime() {
-      return getTime();
+    public long getEpochTimeNs() {
+      return getTimeNs();
     }

     @Override
@@ -64,30 +64,30 @@ public class ActionThrottleTest extends SolrTestCaseJ4 {
   public void testBasics() throws Exception {

     ActionThrottle at = new ActionThrottle("test", 1000);
-    long start = timeSource.getTime();
+    long start = timeSource.getTimeNs();

     at.minimumWaitBetweenActions();

     // should be no wait
-    assertTrue(TimeUnit.MILLISECONDS.convert(timeSource.getTime() - start, TimeUnit.NANOSECONDS) < 1000);
+    assertTrue(TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS) < 1000);
     at.markAttemptingAction();

     if (random().nextBoolean()) Thread.sleep(100);

     at.minimumWaitBetweenActions();

-    long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTime() - start, TimeUnit.NANOSECONDS);
+    long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS);

     assertTrue(elaspsedTime + "ms", elaspsedTime >= 995);

-    start = timeSource.getTime();
+    start = timeSource.getTimeNs();

     at.markAttemptingAction();
     at.minimumWaitBetweenActions();

     Thread.sleep(random().nextInt(1000));

-    elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTime() - start, TimeUnit.NANOSECONDS);
+    elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS);

     assertTrue(elaspsedTime + "ms", elaspsedTime >= 995);
   }
@@ -96,13 +96,13 @@ public class ActionThrottleTest extends SolrTestCaseJ4 {
   public void testAZeroNanoTimeReturnInWait() throws Exception {

     ActionThrottle at = new ActionThrottle("test", 1000, new TestNanoTimeSource(Arrays.asList(new Long[]{0L, 10L})));
-    long start = timeSource.getTime();
+    long start = timeSource.getTimeNs();

     at.markAttemptingAction();

     at.minimumWaitBetweenActions();

-    long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTime() - start, TimeUnit.NANOSECONDS);
+    long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS);

     assertTrue(elaspsedTime + "ms", elaspsedTime >= 995);


@@ -140,7 +140,7 @@ public class ExecutePlanActionTest extends SolrCloudTestCase {
       };
       List<CollectionAdminRequest.AsyncCollectionAdminRequest> operations = Lists.asList(moveReplica, new CollectionAdminRequest.AsyncCollectionAdminRequest[]{mockRequest});
       NodeLostTrigger.NodeLostEvent nodeLostEvent = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST,
-          "mock_trigger_name", Collections.singletonList(TimeSource.CURRENT_TIME.getTime()),
+          "mock_trigger_name", Collections.singletonList(TimeSource.CURRENT_TIME.getTimeNs()),
           Collections.singletonList(sourceNodeName));
       ActionContext actionContext = new ActionContext(survivor.getCoreContainer().getZkController().getSolrCloudManager(), null,
           new HashMap<>(Collections.singletonMap("operations", operations)));

@@ -84,7 +84,7 @@ public class NodeAddedTriggerTest extends SolrCloudTestCase {
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -124,7 +124,7 @@ public class NodeAddedTriggerTest extends SolrCloudTestCase {
      AtomicBoolean fired = new AtomicBoolean(false);
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -264,7 +264,7 @@ public class NodeAddedTriggerTest extends SolrCloudTestCase {
      newTrigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {

@@ -88,7 +88,7 @@ public class NodeLostTriggerTest extends SolrCloudTestCase {
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -130,7 +130,7 @@ public class NodeLostTriggerTest extends SolrCloudTestCase {
      AtomicBoolean fired = new AtomicBoolean(false);
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitTime, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -307,7 +307,7 @@ public class NodeLostTriggerTest extends SolrCloudTestCase {
      newTrigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {

@@ -127,7 +127,7 @@ public class ScheduledMaintenanceTriggerTest extends SolrCloudTestCase {
     public synchronized void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
                                      ActionContext context, Throwable error, String message) {
       List<CapturedEvent> lst = listenerEvents.computeIfAbsent(config.name, s -> new ArrayList<>());
-      CapturedEvent ev = new CapturedEvent(timeSource.getTime(), context, config, stage, actionName, event, message);
+      CapturedEvent ev = new CapturedEvent(timeSource.getTimeNs(), context, config, stage, actionName, event, message);
       log.info("=======> " + ev);
       lst.add(ev);
     }

@@ -318,7 +318,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
         return;
       }
       try {
-        long currentTime = timeSource.getTime();
+        long currentTime = timeSource.getTimeNs();
         if (lastActionExecutedAt.get() != 0) {
           long minDiff = TimeUnit.MILLISECONDS.toNanos(throttlingDelayMs.get() - DELTA_MS);
           log.info("last action at " + lastActionExecutedAt.get() + " current time = " + currentTime +
@@ -646,7 +646,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
       try {
         if (triggerFired.compareAndSet(false, true)) {
           events.add(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
           long eventTimeNanos = event.getEventTime();
           long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
           if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -1017,7 +1017,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
     public synchronized void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
                                      ActionContext context, Throwable error, String message) {
       List<CapturedEvent> lst = listenerEvents.computeIfAbsent(config.name, s -> new ArrayList<>());
-      lst.add(new CapturedEvent(timeSource.getTime(), context, config, stage, actionName, event, message));
+      lst.add(new CapturedEvent(timeSource.getTimeNs(), context, config, stage, actionName, event, message));
     }
   }

@@ -1315,7 +1315,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
       public void run() {
         if (getTriggerFiredLatch().getCount() == 0) return;
         long l = diff.get();
-        diff.set(timeSource.getTime() - l);
+        diff.set(timeSource.getTimeNs() - l);
         getTriggerFiredLatch().countDown();
       }
     });
@@ -1414,7 +1414,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
     public void process(TriggerEvent event, ActionContext context) throws Exception {
       try {
         events.add(event);
-        long currentTimeNanos = timeSource.getTime();
+        long currentTimeNanos = timeSource.getTimeNs();
         long eventTimeNanos = event.getEventTime();
         long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
         if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -1490,7 +1490,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
     assertNull(events.get(3).actionName);

     CapturedEvent ev = events.get(0);
-    long now = timeSource.getTime();
+    long now = timeSource.getTimeNs();
     // verify waitFor
     assertTrue(TimeUnit.SECONDS.convert(waitForSeconds, TimeUnit.NANOSECONDS) - WAIT_FOR_DELTA_NANOS <= now - ev.event.getEventTime());
     Map<String, Double> nodeRates = (Map<String, Double>)ev.event.getProperties().get("node");
@@ -1608,7 +1608,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
     Thread.sleep(2000);
     assertEquals(listenerEvents.toString(), 4, listenerEvents.get("srt").size());
     CapturedEvent ev = listenerEvents.get("srt").get(0);
-    long now = timeSource.getTime();
+    long now = timeSource.getTimeNs();
     // verify waitFor
     assertTrue(TimeUnit.SECONDS.convert(waitForSeconds, TimeUnit.NANOSECONDS) - WAIT_FOR_DELTA_NANOS <= now - ev.event.getEventTime());
     assertEquals(collectionName, ev.event.getProperties().get("collection"));
@@ -1651,7 +1651,7 @@ public class TriggerIntegrationTest extends SolrCloudTestCase {
     Thread.sleep(2000);
     assertEquals(listenerEvents.toString(), 4, listenerEvents.get("srt").size());
     ev = listenerEvents.get("srt").get(0);
-    now = timeSource.getTime();
+    now = timeSource.getTimeNs();
     // verify waitFor
     assertTrue(TimeUnit.SECONDS.convert(waitForSeconds, TimeUnit.NANOSECONDS) - WAIT_FOR_DELTA_NANOS <= now - ev.event.getEventTime());
     assertEquals(collectionName, ev.event.getProperties().get("collection"));

@@ -904,7 +904,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       Map<String, Object> props = sliceProperties.computeIfAbsent(collectionName, c -> new ConcurrentHashMap<>())
           .computeIfAbsent(sliceName.get(), s -> new ConcurrentHashMap<>());
       props.put(ZkStateReader.STATE_PROP, Slice.State.INACTIVE.toString());
-      props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(cloudManager.getTimeSource().getEpochTime()));
+      props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(cloudManager.getTimeSource().getEpochTimeNs()));
       // add slice props
       for (int i = 0; i < subRanges.size(); i++) {
         String subSlice = subSlices.get(i);
@@ -914,7 +914,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         sliceProps.put(Slice.RANGE, range);
         sliceProps.put(Slice.PARENT, sliceName.get());
         sliceProps.put(ZkStateReader.STATE_PROP, Slice.State.ACTIVE.toString());
-        props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(cloudManager.getTimeSource().getEpochTime()));
+        props.put(ZkStateReader.STATE_TIMESTAMP_PROP, String.valueOf(cloudManager.getTimeSource().getEpochTimeNs()));
       }
       simRunLeaderElection(Collections.singleton(collectionName), true);
       results.add("success", "");

@@ -134,7 +134,7 @@ public class TestExecutePlanAction extends SimSolrCloudTestCase {
       };
       List<CollectionAdminRequest.AsyncCollectionAdminRequest> operations = Lists.asList(moveReplica, new CollectionAdminRequest.AsyncCollectionAdminRequest[]{mockRequest});
       NodeLostTrigger.NodeLostEvent nodeLostEvent = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST,
-          "mock_trigger_name", Collections.singletonList(TimeSource.CURRENT_TIME.getTime()),
+          "mock_trigger_name", Collections.singletonList(TimeSource.CURRENT_TIME.getTimeNs()),
           Collections.singletonList(sourceNodeName));
       ActionContext actionContext = new ActionContext(cluster, null,
           new HashMap<>(Collections.singletonMap("operations", operations)));

@@ -116,7 +116,7 @@ public class TestLargeCluster extends SimSolrCloudTestCase {
     public synchronized void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
                                      ActionContext context, Throwable error, String message) {
       List<CapturedEvent> lst = listenerEvents.computeIfAbsent(config.name, s -> new ArrayList<>());
-      lst.add(new CapturedEvent(cluster.getTimeSource().getTime(), context, config, stage, actionName, event, message));
+      lst.add(new CapturedEvent(cluster.getTimeSource().getTimeNs(), context, config, stage, actionName, event, message));
     }
   }

@@ -86,7 +86,7 @@ public class TestNodeAddedTrigger extends SimSolrCloudTestCase {
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -125,7 +125,7 @@ public class TestNodeAddedTrigger extends SimSolrCloudTestCase {
      AtomicBoolean fired = new AtomicBoolean(false);
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -257,7 +257,7 @@ public class TestNodeAddedTrigger extends SimSolrCloudTestCase {
      newTrigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {

@@ -90,7 +90,7 @@ public class TestNodeLostTrigger extends SimSolrCloudTestCase {
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -131,7 +131,7 @@ public class TestNodeLostTrigger extends SimSolrCloudTestCase {
      AtomicBoolean fired = new AtomicBoolean(false);
      trigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitTime, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -283,7 +283,7 @@ public class TestNodeLostTrigger extends SimSolrCloudTestCase {
      newTrigger.setProcessor(event -> {
        if (fired.compareAndSet(false, true)) {
          eventRef.set(event);
-          long currentTimeNanos = timeSource.getTime();
+          long currentTimeNanos = timeSource.getTimeNs();
          long eventTimeNanos = event.getEventTime();
          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {

@@ -242,8 +242,8 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
       }
       try {
         if (lastActionExecutedAt.get() != 0) {
-          log.info("last action at " + lastActionExecutedAt.get() + " time = " + cluster.getTimeSource().getTime());
-          if (TimeUnit.NANOSECONDS.toMillis(cluster.getTimeSource().getTime() - lastActionExecutedAt.get()) <
+          log.info("last action at " + lastActionExecutedAt.get() + " time = " + cluster.getTimeSource().getTimeNs());
+          if (TimeUnit.NANOSECONDS.toMillis(cluster.getTimeSource().getTimeNs() - lastActionExecutedAt.get()) <
               TimeUnit.SECONDS.toMillis(ScheduledTriggers.DEFAULT_ACTION_THROTTLE_PERIOD_SECONDS) - DELTA_MS) {
             log.info("action executed again before minimum wait time from {}", event.getSource());
             fail("TriggerListener was fired before the throttling period");
@@ -251,7 +251,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
       }
       if (onlyOnce.compareAndSet(false, true)) {
         log.info("action executed from {}", event.getSource());
-        lastActionExecutedAt.set(cluster.getTimeSource().getTime());
+        lastActionExecutedAt.set(cluster.getTimeSource().getTimeNs());
         getTriggerFiredLatch().countDown();
       } else {
         log.info("action executed more than once from {}", event.getSource());
@@ -552,7 +552,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
       try {
         if (triggerFired.compareAndSet(false, true)) {
           events.add(event);
-          long currentTimeNanos = cluster.getTimeSource().getTime();
+          long currentTimeNanos = cluster.getTimeSource().getTimeNs();
           long eventTimeNanos = event.getEventTime();
           long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
           if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -892,7 +892,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     public synchronized void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
                                      ActionContext context, Throwable error, String message) {
       List<CapturedEvent> lst = listenerEvents.computeIfAbsent(config.name, s -> new ArrayList<>());
-      lst.add(new CapturedEvent(cluster.getTimeSource().getTime(), context, config, stage, actionName, event, message));
+      lst.add(new CapturedEvent(cluster.getTimeSource().getTimeNs(), context, config, stage, actionName, event, message));
     }
   }

@@ -1128,7 +1128,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     public void process(TriggerEvent event, ActionContext context) throws Exception {
       try {
         events.add(event);
-        long currentTimeNanos = cluster.getTimeSource().getTime();
+        long currentTimeNanos = cluster.getTimeSource().getTimeNs();
         long eventTimeNanos = event.getEventTime();
         long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
         if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
@@ -1207,7 +1207,7 @@ public class TestTriggerIntegration extends SimSolrCloudTestCase {
     assertNull(events.get(3).actionName);

     CapturedEvent ev = events.get(0);
-    long now = cluster.getTimeSource().getTime();
+    long now = cluster.getTimeSource().getTimeNs();
     // verify waitFor
     assertTrue(TimeUnit.SECONDS.convert(waitForSeconds, TimeUnit.NANOSECONDS) - WAIT_FOR_DELTA_NANOS <= now - ev.event.getEventTime());
     Map<String, Double> nodeRates = (Map<String, Double>)ev.event.getProperties().get("node");

@@ -322,7 +322,7 @@ public class PolicyHelper {
     TimeSource timeSource = cloudManager.getTimeSource();
     synchronized (lockObj) {
       if (sessionWrapper.status == Status.NULL ||
-          TimeUnit.SECONDS.convert(timeSource.getTime() - sessionWrapper.lastUpdateTime, TimeUnit.NANOSECONDS) > SESSION_EXPIRY) {
+          TimeUnit.SECONDS.convert(timeSource.getTimeNs() - sessionWrapper.lastUpdateTime, TimeUnit.NANOSECONDS) > SESSION_EXPIRY) {
         //no session available or the session is expired
         return createSession(cloudManager);
       } else {
@@ -425,8 +425,8 @@ public class PolicyHelper {

     public SessionWrapper(Policy.Session session, SessionRef ref) {
       lastUpdateTime = createTime = session != null ?
-          session.cloudManager.getTimeSource().getTime() :
-          TimeSource.NANO_TIME.getTime();
+          session.cloudManager.getTimeSource().getTimeNs() :
+          TimeSource.NANO_TIME.getTimeNs();
       this.session = session;
       this.status = Status.UNUSED;
       this.ref = ref;
@@ -438,8 +438,8 @@ public class PolicyHelper {

     public SessionWrapper update(Policy.Session session) {
       this.lastUpdateTime = session != null ?
-          session.cloudManager.getTimeSource().getTime() :
-          TimeSource.NANO_TIME.getTime();
+          session.cloudManager.getTimeSource().getTimeNs() :
+          TimeSource.NANO_TIME.getTimeNs();
       this.session = session;
       return this;
     }

@@ -35,20 +35,20 @@ public abstract class TimeSource {

   /**
    * Implementation that uses {@link System#currentTimeMillis()}.
-   * This implementation's {@link #getTime()} returns the same values as
-   * {@link #getEpochTime()}.
+   * This implementation's {@link #getTimeNs()} returns the same values as
+   * {@link #getEpochTimeNs()}.
    */
   public static final class CurrentTimeSource extends TimeSource {

     @Override
     @SuppressForbidden(reason = "Needed to provide timestamps based on currentTimeMillis.")
-    public long getTime() {
+    public long getTimeNs() {
       return TimeUnit.NANOSECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS);
     }

     @Override
-    public long getEpochTime() {
-      return getTime();
+    public long getEpochTimeNs() {
+      return getTimeNs();
     }

     @Override
@@ -73,18 +73,18 @@ public abstract class TimeSource {
     private final long nanoStart;

     public NanoTimeSource() {
-      epochStart = CURRENT_TIME.getTime();
+      epochStart = CURRENT_TIME.getTimeNs();
       nanoStart = System.nanoTime();
     }

     @Override
-    public long getTime() {
+    public long getTimeNs() {
       return System.nanoTime();
     }

     @Override
-    public long getEpochTime() {
-      return epochStart + getTime() - nanoStart;
+    public long getEpochTimeNs() {
+      return epochStart + getTimeNs() - nanoStart;
     }

     @Override
@@ -111,18 +111,18 @@ public abstract class TimeSource {
      */
     public SimTimeSource(double multiplier) {
       this.multiplier = multiplier;
-      epochStart = CURRENT_TIME.getTime();
-      nanoStart = NANO_TIME.getTime();
+      epochStart = CURRENT_TIME.getTimeNs();
+      nanoStart = NANO_TIME.getTimeNs();
     }

     @Override
-    public long getTime() {
-      return nanoStart + Math.round((double)(NANO_TIME.getTime() - nanoStart) * multiplier);
+    public long getTimeNs() {
+      return nanoStart + Math.round((double)(NANO_TIME.getTimeNs() - nanoStart) * multiplier);
     }

     @Override
-    public long getEpochTime() {
-      return epochStart + getTime() - nanoStart;
+    public long getEpochTimeNs() {
+      return epochStart + getTimeNs() - nanoStart;
     }

     @Override
@@ -189,17 +189,38 @@ public abstract class TimeSource {
    * Return a time value, in nanosecond units. Depending on implementation this value may or
    * may not be related to Epoch time.
    */
-  public abstract long getTime();
+  public abstract long getTimeNs();

   /**
    * Return Epoch time. Implementations that are not natively based on epoch time may
    * return values that are consistently off by a (small) fixed number of milliseconds from
    * the actual epoch time.
    */
-  public abstract long getEpochTime();
+  public abstract long getEpochTimeNs();

+  /**
+   * Sleep according to this source's notion of time. Eg. accelerated time source such as
+   * {@link SimTimeSource} will sleep proportionally shorter, according to its multiplier.
+   * @param ms number of milliseconds to sleep
+   * @throws InterruptedException when the current thread is interrupted
+   */
   public abstract void sleep(long ms) throws InterruptedException;

+  /**
+   * This method allows using TimeSource with APIs that require providing just plain time intervals,
+   * eg. {@link Object#wait(long)}. Values returned by this method are adjusted according to the
+   * time source's notion of time - eg. accelerated time source provided by {@link SimTimeSource}
+   * will return intervals that are proportionally shortened by the multiplier.
+   * <p>NOTE: converting small values may significantly affect precision of the returned values
+   * due to rounding, especially for accelerated time source, so care should be taken to use time
+   * units that result in relatively large values. For example, converting a value of 1 expressed
+   * in seconds would result in less precision than converting a value of 1000 expressed in milliseconds.</p>
+   * @param fromUnit source unit
+   * @param value original value
+   * @param toUnit target unit
+   * @return converted value, possibly scaled by the source's notion of accelerated time
+   * (see {@link SimTimeSource})
+   */
   public abstract long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit);

   public String toString() {

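Note (not part of this commit): a hedged sketch of a custom TimeSource written against the renamed API, closely mirroring NanoTimeSource above. It overrides only the abstract members visible in this hunk (getTimeNs, getEpochTimeNs, sleep, convertDelay); if the class declares additional abstract members not shown here, they would need implementing as well. Import package assumed.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.TimeSource; // package assumed

// A non-accelerated source: getTimeNs() is monotonic nanoTime, getEpochTimeNs() is anchored to
// currentTimeMillis() at construction, sleep() is a real sleep and convertDelay() is a plain conversion.
public class WallClockTimeSource extends TimeSource {
  private final long epochStart = TimeUnit.NANOSECONDS.convert(System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  private final long nanoStart = System.nanoTime();

  @Override
  public long getTimeNs() {
    return System.nanoTime();
  }

  @Override
  public long getEpochTimeNs() {
    return epochStart + getTimeNs() - nanoStart;
  }

  @Override
  public void sleep(long ms) throws InterruptedException {
    Thread.sleep(ms);
  }

  @Override
  public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) {
    return toUnit.convert(value, fromUnit); // 1:1, no time acceleration
  }
}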
@@ -471,10 +471,10 @@ public class Utils {
   }

   public static long time(TimeSource timeSource, TimeUnit unit) {
-    return unit.convert(timeSource.getTime(), TimeUnit.NANOSECONDS);
+    return unit.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS);
   }

   public static long timeElapsed(TimeSource timeSource, long start, TimeUnit unit) {
-    return unit.convert(timeSource.getTime() - NANOSECONDS.convert(start, unit), NANOSECONDS);
+    return unit.convert(timeSource.getTimeNs() - NANOSECONDS.convert(start, unit), NANOSECONDS);
   }
 }

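Note (not part of this commit): a hedged sketch of the two Utils helpers above. Both take the unit you want back and do the nanosecond conversion internally; the start value passed to timeElapsed must be expressed in that same unit. Import packages assumed.

import java.util.concurrent.TimeUnit;

import org.apache.solr.common.util.TimeSource; // package assumed
import org.apache.solr.common.util.Utils;      // package assumed

public class UtilsTimeSketch {
  public static void main(String[] args) {
    long startMs = Utils.time(TimeSource.NANO_TIME, TimeUnit.MILLISECONDS);
    // ... do some work ...
    long elapsedMs = Utils.timeElapsed(TimeSource.NANO_TIME, startMs, TimeUnit.MILLISECONDS);
    System.out.println("elapsed ~" + elapsedMs + " ms");
  }
}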
@@ -33,8 +33,8 @@ public class TestTimeSource extends SolrTestCaseJ4 {
   }

   private void doTestEpochTime(TimeSource ts) throws Exception {
-    long prevTime = ts.getTime();
-    long prevEpochTime = ts.getEpochTime();
+    long prevTime = ts.getTimeNs();
+    long prevEpochTime = ts.getEpochTimeNs();
     long delta = 500000000; // 500 ms
     long maxDiff = 200000;
     if (ts instanceof TimeSource.SimTimeSource) {
@@ -42,8 +42,8 @@ public class TestTimeSource extends SolrTestCaseJ4 {
     }
     for (int i = 0; i < 10; i++) {
       ts.sleep(500);
-      long curTime = ts.getTime();
-      long curEpochTime = ts.getEpochTime();
+      long curTime = ts.getTimeNs();
+      long curEpochTime = ts.getEpochTimeNs();
       long diff = prevTime + delta - curTime;
       assertTrue(ts + " time diff=" + diff, diff < maxDiff);
       diff = prevEpochTime + delta - curEpochTime;