Introduce translog no-op

As the translog evolves towards a full operations log as part of the
sequence numbers push, there is a need for the translog to be able to
represent operations for which a sequence number was assigned, but the
operation did not mutate the index. This can arise, for example, from
operations that fail after a sequence number has been assigned, or from
gaps in the history when an operation is assigned a sequence number but
never completes (e.g., because of a node crash). It is important
that these operations appear in the history so that they can be
replicated and replayed during recovery as otherwise the history will be
incomplete and local checkpoints will not be able to advance. This
commit introduces a no-op to the translog to set the stage for these
efforts.

Relates #22291
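
For orientation, here is a minimal, self-contained sketch of the shape of
such a no-op: the (seqNo, primaryTerm, reason) triple and its serialization
that the diff below introduces. It is an illustration only, not the actual
Elasticsearch classes; java.io.DataOutputStream/DataInputStream stand in for
the real StreamOutput/StreamInput, and the Engine/Translog plumbing is
omitted.

// Sketch only: simplified stand-in for Translog.NoOp, not the real class.
import java.io.*;

final class TranslogNoOpSketch {
    final long seqNo;        // sequence number assigned to the operation
    final long primaryTerm;  // primary term under which the seq no was assigned
    final String reason;     // why the operation did not mutate the index

    TranslogNoOpSketch(long seqNo, long primaryTerm, String reason) {
        assert seqNo >= 0;
        assert primaryTerm >= 0;
        assert reason != null;
        this.seqNo = seqNo;
        this.primaryTerm = primaryTerm;
        this.reason = reason;
    }

    // Mirrors NoOp#writeTo in the diff: two fixed-width longs, then the reason.
    void writeTo(DataOutputStream out) throws IOException {
        out.writeLong(seqNo);
        out.writeLong(primaryTerm);
        out.writeUTF(reason);
    }

    static TranslogNoOpSketch readFrom(DataInputStream in) throws IOException {
        return new TranslogNoOpSketch(in.readLong(), in.readLong(), in.readUTF());
    }

    public static void main(String[] args) throws IOException {
        // Round-trip one no-op, e.g. written for a seq no that was assigned
        // but whose operation failed before it could mutate the index.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new TranslogNoOpSketch(42, 1, "op failed after seq no assignment")
            .writeTo(new DataOutputStream(bytes));
        TranslogNoOpSketch read =
            readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(read.seqNo + " " + read.primaryTerm + " " + read.reason);
    }
}

In the actual change the engine records such an entry via Engine#noOp,
appends a Translog.NoOp to the translog, and marks the sequence number as
completed so the local checkpoint can advance (see InternalEngine#innerNoOp
in the diff below).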
Jason Tedor 2016-12-21 23:08:16 -05:00 committed by GitHub
parent 91cb563247
commit 7946396fe6
37 changed files with 502 additions and 191 deletions

@ -328,7 +328,7 @@ public abstract class StreamOutput extends OutputStream {
// make sure any possible char can fit into the buffer in any possible iteration
// we need at most 3 bytes so we flush the buffer once we have less than 3 bytes
// left before we start another iteration
if (offset > buffer.length-3) {
if (offset > buffer.length - 3) {
writeBytes(buffer, offset);
offset = 0;
}

@ -47,7 +47,6 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@ -296,6 +295,8 @@ public abstract class Engine implements Closeable {
*/
public abstract DeleteResult delete(final Delete delete);
public abstract NoOpResult noOp(final NoOp noOp);
/**
* Base class for index and delete operation results
* Holds result meta data (e.g. translog location, updated version)
@ -382,6 +383,7 @@ public abstract class Engine implements Closeable {
}
public static class IndexResult extends Result {
private final boolean created;
public IndexResult(long version, long seqNo, boolean created) {
@ -397,9 +399,11 @@ public abstract class Engine implements Closeable {
public boolean isCreated() {
return created;
}
}
public static class DeleteResult extends Result {
private final boolean found;
public DeleteResult(long version, long seqNo, boolean found) {
@ -415,6 +419,19 @@ public abstract class Engine implements Closeable {
public boolean isFound() {
return found;
}
}
static class NoOpResult extends Result {
NoOpResult(long seqNo) {
super(Operation.TYPE.NO_OP, 0, seqNo);
}
NoOpResult(long seqNo, Exception failure) {
super(Operation.TYPE.NO_OP, failure, 0, seqNo);
}
}
/**
@ -910,7 +927,7 @@ public abstract class Engine implements Closeable {
/** type of operation (index, delete), subclasses use static types */
public enum TYPE {
INDEX, DELETE;
INDEX, DELETE, NO_OP;
private final String lowercase;
@ -1114,6 +1131,50 @@ public abstract class Engine implements Closeable {
public int estimatedSizeInBytes() {
return (uid().field().length() + uid().text().length()) * 2 + 20;
}
}
public static class NoOp extends Operation {
private final String reason;
public String reason() {
return reason;
}
public NoOp(
final Term uid,
final long seqNo,
final long primaryTerm,
final long version,
final VersionType versionType,
final Origin origin,
final long startTime,
final String reason) {
super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
this.reason = reason;
}
@Override
public String type() {
throw new UnsupportedOperationException();
}
@Override
String id() {
throw new UnsupportedOperationException();
}
@Override
TYPE operationType() {
return TYPE.NO_OP;
}
@Override
public int estimatedSizeInBytes() {
return 2 * reason.length() + 2 * Long.BYTES;
}
}
public static class Get {

@ -277,7 +277,7 @@ public class InternalEngine extends Engine {
}
// flush if we recovered something or if we have references to older translogs
// note: if opsRecovered == 0 and we have older translogs it means they are corrupted or 0 length.
assert pendingTranslogRecovery.get(): "translogRecovery is not pending but should be";
assert pendingTranslogRecovery.get() : "translogRecovery is not pending but should be";
pendingTranslogRecovery.set(false); // we are good - now we can commit
if (opsRecovered > 0) {
logger.trace("flushing post recovery from translog. ops recovered [{}]. committed translog id [{}]. current id [{}]",
@ -375,7 +375,7 @@ public class InternalEngine extends Engine {
* specified global checkpoint.
*
* @param globalCheckpoint the global checkpoint to use
* @param indexWriter the index writer (for the Lucene commit point)
* @param indexWriter the index writer (for the Lucene commit point)
* @return the sequence number stats
*/
private static SeqNoStats loadSeqNoStatsFromLucene(final long globalCheckpoint, final IndexWriter indexWriter) {
@ -434,7 +434,7 @@ public class InternalEngine extends Engine {
if (get.versionType().isVersionConflictForReads(versionValue.version(), get.version())) {
Uid uid = Uid.createUid(get.uid().text());
throw new VersionConflictEngineException(shardId, uid.type(), uid.id(),
get.versionType().explainConflictForReads(versionValue.version(), get.version()));
get.versionType().explainConflictForReads(versionValue.version(), get.version()));
}
refresh("realtime_get");
}
@ -532,7 +532,7 @@ public class InternalEngine extends Engine {
*
* @return failure if the failure is a document specific failure (e.g. analysis chain failure)
* or throws Exception if the failure caused the engine to fail (e.g. out of disk, lucene tragic event)
*
* <p>
* Note: pkg-private for testing
*/
final Exception checkIfDocumentFailureOrThrow(final Operation operation, final Exception failure) {
@ -577,7 +577,7 @@ public class InternalEngine extends Engine {
case PEER_RECOVERY:
case REPLICA:
assert index.version() == 1 && index.versionType() == VersionType.EXTERNAL
: "version: " + index.version() + " type: " + index.versionType();
: "version: " + index.version() + " type: " + index.versionType();
return true;
case LOCAL_TRANSLOG_RECOVERY:
assert index.isRetry();
@ -596,10 +596,10 @@ public class InternalEngine extends Engine {
" index version: " + engineConfig.getIndexSettings().getIndexVersionCreated() + ". seq no: " + seqNo;
} else if (origin == Operation.Origin.PRIMARY) {
// sequence number should not be set when operation origin is primary
assert seqNo == SequenceNumbersService.UNASSIGNED_SEQ_NO : "primary ops should never an assigned seq no. got: " + seqNo;
assert seqNo == SequenceNumbersService.UNASSIGNED_SEQ_NO : "primary ops should never have an assigned seq no. got: " + seqNo;
} else {
// sequence number should be set when operation origin is not primary
assert seqNo >= 0 : "replica ops should an assigned seq no. origin: " + origin +
assert seqNo >= 0 : "recovery or replica ops should have an assigned seq no. origin: " + origin +
" index version: " + engineConfig.getIndexSettings().getIndexVersionCreated();
}
return true;
@ -651,7 +651,7 @@ public class InternalEngine extends Engine {
if (deOptimizeTimestamp >= index.getAutoGeneratedIdTimestamp()) {
break;
}
} while(maxUnsafeAutoIdTimestamp.compareAndSet(deOptimizeTimestamp,
} while (maxUnsafeAutoIdTimestamp.compareAndSet(deOptimizeTimestamp,
index.getAutoGeneratedIdTimestamp()) == false);
assert maxUnsafeAutoIdTimestamp.get() >= index.getAutoGeneratedIdTimestamp();
} else {
@ -859,6 +859,34 @@ public class InternalEngine extends Engine {
return found;
}
@Override
public NoOpResult noOp(final NoOp noOp) {
NoOpResult noOpResult;
try (final ReleasableLock ignored = readLock.acquire()) {
noOpResult = innerNoOp(noOp);
} catch (final Exception e) {
noOpResult = new NoOpResult(noOp.seqNo(), e);
}
return noOpResult;
}
private NoOpResult innerNoOp(final NoOp noOp) throws IOException {
assert noOp.seqNo() > SequenceNumbersService.NO_OPS_PERFORMED;
final long seqNo = noOp.seqNo();
try {
final NoOpResult noOpResult = new NoOpResult(noOp.seqNo());
final Translog.Location location = translog.add(new Translog.NoOp(noOp.seqNo(), noOp.primaryTerm(), noOp.reason()));
noOpResult.setTranslogLocation(location);
noOpResult.setTook(System.nanoTime() - noOp.startTime());
noOpResult.freeze();
return noOpResult;
} finally {
if (seqNo != SequenceNumbersService.UNASSIGNED_SEQ_NO) {
seqNoService().markSeqNoAsCompleted(seqNo);
}
}
}
@Override
public void refresh(String source) throws EngineException {
// we obtain a read lock here, since we don't want a flush to happen while we are refreshing

@ -116,6 +116,11 @@ public class ShadowEngine extends Engine {
throw new UnsupportedOperationException(shardId + " delete operation not allowed on shadow engine");
}
@Override
public NoOpResult noOp(NoOp noOp) {
throw new UnsupportedOperationException(shardId + " no-op operation not allowed on shadow engine");
}
@Override
public SyncedFlushResult syncFlush(String syncId, CommitId expectedCommitId) {
throw new UnsupportedOperationException(shardId + " sync commit operation not allowed on shadow engine");

@ -22,6 +22,7 @@ import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
import org.elasticsearch.index.mapper.DocumentMapperForType;
@ -29,6 +30,7 @@ import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.seqno.SequenceNumbersService;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.rest.RestStatus;
@ -94,6 +96,7 @@ public class TranslogRecoveryPerformer {
}
}
}
return opsRecovered;
}
@ -158,22 +161,28 @@ public class TranslogRecoveryPerformer {
.routing(index.routing()).parent(index.parent()), index.seqNo(), index.primaryTerm(),
index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, index.getAutoGeneratedIdTimestamp(), true);
maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates);
if (logger.isTraceEnabled()) {
logger.trace("[translog] recover [index] op of [{}][{}]", index.type(), index.id());
}
logger.trace("[translog] recover [index] op [({}, {})] of [{}][{}]", index.seqNo(), index.primaryTerm(), index.type(), index.id());
index(engine, engineIndex);
break;
case DELETE:
Translog.Delete delete = (Translog.Delete) operation;
Uid uid = Uid.createUid(delete.uid().text());
if (logger.isTraceEnabled()) {
logger.trace("[translog] recover [delete] op of [{}][{}]", uid.type(), uid.id());
}
logger.trace("[translog] recover [delete] op [({}, {})] of [{}][{}]", delete.seqNo(), delete.primaryTerm(), uid.type(), uid.id());
final Engine.Delete engineDelete = new Engine.Delete(uid.type(), uid.id(), delete.uid(), delete.seqNo(),
delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(),
origin, System.nanoTime());
delete(engine, engineDelete);
break;
case NO_OP:
final Translog.NoOp noOp = (Translog.NoOp) operation;
final long seqNo = noOp.seqNo();
final long primaryTerm = noOp.primaryTerm();
final String reason = noOp.reason();
logger.trace("[translog] recover [no_op] op [({}, {})] of [{}]", seqNo, primaryTerm, reason);
final Engine.NoOp engineNoOp =
new Engine.NoOp(null, seqNo, primaryTerm, 0, VersionType.INTERNAL, origin, System.nanoTime(), reason);
noOp(engine, engineNoOp);
break;
default:
throw new IllegalStateException("No operation defined for [" + operation + "]");
}
@ -206,6 +215,9 @@ public class TranslogRecoveryPerformer {
engine.delete(engineDelete);
}
protected void noOp(Engine engine, Engine.NoOp engineNoOp) {
engine.noOp(engineNoOp);
}
/**
* Called once for every processed operation by this recovery performer.

@ -55,6 +55,7 @@ import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
@ -743,7 +744,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
@Deprecated
CREATE((byte) 1),
INDEX((byte) 2),
DELETE((byte) 3);
DELETE((byte) 3),
NO_OP((byte) 4);
private final byte id;
@ -763,6 +765,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
return INDEX;
case 3:
return DELETE;
case 4:
return NO_OP;
default:
throw new IllegalArgumentException("No type mapped for [" + id + "]");
}
@ -786,9 +790,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
// the deserialization logic in Index was identical to that of Create when create was deprecated
return new Index(input);
case DELETE:
return new Translog.Delete(input);
return new Delete(input);
case INDEX:
return new Index(input);
case NO_OP:
return new NoOp(input);
default:
throw new IOException("No type for [" + type + "]");
}
@ -805,6 +811,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
}
public static class Source {
public final BytesReference source;
public final String routing;
public final String parent;
@ -814,9 +821,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
this.routing = routing;
this.parent = parent;
}
}
public static class Index implements Operation {
public static final int FORMAT_2x = 6; // since 2.0-beta1 and 1.1
public static final int FORMAT_AUTO_GENERATED_IDS = 7; // since 5.0.0-beta1
public static final int FORMAT_SEQ_NO = FORMAT_AUTO_GENERATED_IDS + 1; // since 6.0.0
@ -853,8 +862,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
this.autoGeneratedIdTimestamp = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP;
}
if (format >= FORMAT_SEQ_NO) {
seqNo = in.readVLong();
primaryTerm = in.readVLong();
seqNo = in.readLong();
primaryTerm = in.readLong();
}
}
@ -943,11 +952,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
out.writeOptionalString(routing);
out.writeOptionalString(parent);
out.writeLong(version);
out.writeByte(versionType.getValue());
out.writeLong(autoGeneratedIdTimestamp);
out.writeVLong(seqNo);
out.writeVLong(primaryTerm);
out.writeLong(seqNo);
out.writeLong(primaryTerm);
}
@Override
@ -1004,9 +1013,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
public long getAutoGeneratedIdTimestamp() {
return autoGeneratedIdTimestamp;
}
}
public static class Delete implements Operation {
private static final int FORMAT_5_X = 3;
private static final int FORMAT_SEQ_NO = FORMAT_5_X + 1;
public static final int SERIALIZATION_FORMAT = FORMAT_SEQ_NO;
@ -1127,6 +1138,81 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
"uid=" + uid +
'}';
}
}
public static class NoOp implements Operation {
private final long seqNo;
private final long primaryTerm;
private final String reason;
public long seqNo() {
return seqNo;
}
public long primaryTerm() {
return primaryTerm;
}
public String reason() {
return reason;
}
NoOp(final StreamInput in) throws IOException {
seqNo = in.readLong();
primaryTerm = in.readLong();
reason = in.readString();
}
public NoOp(final long seqNo, final long primaryTerm, final String reason) {
assert seqNo > SequenceNumbersService.NO_OPS_PERFORMED;
assert primaryTerm >= 0;
assert reason != null;
this.seqNo = seqNo;
this.primaryTerm = primaryTerm;
this.reason = reason;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(seqNo);
out.writeLong(primaryTerm);
out.writeString(reason);
}
@Override
public Type opType() {
return Type.NO_OP;
}
@Override
public long estimateSize() {
return 2 * reason.length() + 2 * Long.BYTES;
}
@Override
public Source getSource() {
throw new UnsupportedOperationException("source does not exist for a no-op");
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
final NoOp that = (NoOp) obj;
return seqNo == that.seqNo && primaryTerm == that.primaryTerm && reason.equals(that.reason);
}
@Override
public int hashCode() {
return 31 * 31 * 31 + 31 * 31 * Long.hashCode(seqNo) + 31 * Long.hashCode(primaryTerm) + reason().hashCode();
}
}

@ -291,41 +291,51 @@ public class NodeStatsTests extends ESTestCase {
new OsStats.Swap(randomLong(), randomLong()),
new OsStats.Cgroup(
randomAsciiOfLength(8),
randomPositiveLong(),
randomNonNegativeLong(),
randomAsciiOfLength(8),
randomPositiveLong(),
randomPositiveLong(),
new OsStats.Cgroup.CpuStat(randomPositiveLong(), randomPositiveLong(), randomPositiveLong())));
randomNonNegativeLong(),
randomNonNegativeLong(),
new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())));
}
ProcessStats processStats = frequently() ? new ProcessStats(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
new ProcessStats.Cpu(randomShort(), randomPositiveLong()),
new ProcessStats.Mem(randomPositiveLong())) : null;
ProcessStats processStats = frequently() ?
new ProcessStats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
new ProcessStats.Cpu(randomShort(), randomNonNegativeLong()),
new ProcessStats.Mem(randomNonNegativeLong())) :
null;
JvmStats jvmStats = null;
if (frequently()) {
int numMemoryPools = randomIntBetween(0, 10);
List<JvmStats.MemoryPool> memoryPools = new ArrayList<>(numMemoryPools);
for (int i = 0; i < numMemoryPools; i++) {
memoryPools.add(new JvmStats.MemoryPool(randomAsciiOfLengthBetween(3, 10), randomPositiveLong(),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong()));
memoryPools.add(new JvmStats.MemoryPool(randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(),
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
}
JvmStats.Threads threads = new JvmStats.Threads(randomIntBetween(1, 1000), randomIntBetween(1, 1000));
int numGarbageCollectors = randomIntBetween(0, 10);
JvmStats.GarbageCollector[] garbageCollectorsArray = new JvmStats.GarbageCollector[numGarbageCollectors];
for (int i = 0; i < numGarbageCollectors; i++) {
garbageCollectorsArray[i] = new JvmStats.GarbageCollector(randomAsciiOfLengthBetween(3, 10),
randomPositiveLong(), randomPositiveLong());
randomNonNegativeLong(), randomNonNegativeLong());
}
JvmStats.GarbageCollectors garbageCollectors = new JvmStats.GarbageCollectors(garbageCollectorsArray);
int numBufferPools = randomIntBetween(0, 10);
List<JvmStats.BufferPool> bufferPoolList = new ArrayList<>();
for (int i = 0; i < numBufferPools; i++) {
bufferPoolList.add(new JvmStats.BufferPool(randomAsciiOfLengthBetween(3, 10), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong()));
bufferPoolList.add(
new JvmStats.BufferPool(
randomAsciiOfLengthBetween(3, 10),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()));
}
JvmStats.Classes classes = new JvmStats.Classes(randomPositiveLong(), randomPositiveLong(), randomPositiveLong());
jvmStats = frequently() ? new JvmStats(randomPositiveLong(), randomPositiveLong(), new JvmStats.Mem(randomPositiveLong(),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), memoryPools), threads,
garbageCollectors, randomBoolean() ? Collections.emptyList() : bufferPoolList, classes) : null;
JvmStats.Classes classes = new JvmStats.Classes(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
jvmStats =
frequently() ? new JvmStats(randomNonNegativeLong(), randomNonNegativeLong(), new JvmStats.Mem(randomNonNegativeLong(),
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), memoryPools),
threads, garbageCollectors, randomBoolean() ? Collections.emptyList() : bufferPoolList, classes) : null;
}
ThreadPoolStats threadPoolStats = null;
if (frequently()) {
@ -333,7 +343,7 @@ public class NodeStatsTests extends ESTestCase {
List<ThreadPoolStats.Stats> threadPoolStatsList = new ArrayList<>();
for (int i = 0; i < numThreadPoolStats; i++) {
threadPoolStatsList.add(new ThreadPoolStats.Stats(randomAsciiOfLengthBetween(3, 10), randomIntBetween(1, 1000),
randomIntBetween(1, 1000), randomIntBetween(1, 1000), randomPositiveLong(),
randomIntBetween(1, 1000), randomIntBetween(1, 1000), randomNonNegativeLong(),
randomIntBetween(1, 1000), randomIntBetween(1, 1000)));
}
threadPoolStats = new ThreadPoolStats(threadPoolStatsList);
@ -345,50 +355,51 @@ public class NodeStatsTests extends ESTestCase {
for (int i = 0; i < numDeviceStats; i++) {
FsInfo.DeviceStats previousDeviceStats = randomBoolean() ? null :
new FsInfo.DeviceStats(randomInt(), randomInt(), randomAsciiOfLengthBetween(3, 10),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), null);
deviceStatsArray[i] = new FsInfo.DeviceStats(randomInt(), randomInt(), randomAsciiOfLengthBetween(3, 10),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), randomPositiveLong(), previousDeviceStats);
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), null);
deviceStatsArray[i] =
new FsInfo.DeviceStats(randomInt(), randomInt(), randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(),
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), previousDeviceStats);
}
FsInfo.IoStats ioStats = new FsInfo.IoStats(deviceStatsArray);
int numPaths = randomIntBetween(0, 10);
FsInfo.Path[] paths = new FsInfo.Path[numPaths];
for (int i = 0; i < numPaths; i++) {
paths[i] = new FsInfo.Path(randomAsciiOfLengthBetween(3, 10), randomBoolean() ? randomAsciiOfLengthBetween(3, 10) : null,
randomPositiveLong(), randomPositiveLong(), randomPositiveLong());
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong());
}
fsInfo = new FsInfo(randomPositiveLong(), ioStats, paths);
fsInfo = new FsInfo(randomNonNegativeLong(), ioStats, paths);
}
TransportStats transportStats = frequently() ? new TransportStats(randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) : null;
HttpStats httpStats = frequently() ? new HttpStats(randomPositiveLong(), randomPositiveLong()) : null;
TransportStats transportStats = frequently() ? new TransportStats(randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()) : null;
HttpStats httpStats = frequently() ? new HttpStats(randomNonNegativeLong(), randomNonNegativeLong()) : null;
AllCircuitBreakerStats allCircuitBreakerStats = null;
if (frequently()) {
int numCircuitBreakerStats = randomIntBetween(0, 10);
CircuitBreakerStats[] circuitBreakerStatsArray = new CircuitBreakerStats[numCircuitBreakerStats];
for (int i = 0; i < numCircuitBreakerStats; i++) {
circuitBreakerStatsArray[i] = new CircuitBreakerStats(randomAsciiOfLengthBetween(3, 10), randomPositiveLong(),
randomPositiveLong(), randomDouble(), randomPositiveLong());
circuitBreakerStatsArray[i] = new CircuitBreakerStats(randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(),
randomNonNegativeLong(), randomDouble(), randomNonNegativeLong());
}
allCircuitBreakerStats = new AllCircuitBreakerStats(circuitBreakerStatsArray);
}
ScriptStats scriptStats = frequently() ? new ScriptStats(randomPositiveLong(), randomPositiveLong()) : null;
ScriptStats scriptStats = frequently() ? new ScriptStats(randomNonNegativeLong(), randomNonNegativeLong()) : null;
DiscoveryStats discoveryStats = frequently() ? new DiscoveryStats(randomBoolean() ? new PendingClusterStateStats(randomInt(),
randomInt(), randomInt()) : null) : null;
IngestStats ingestStats = null;
if (frequently()) {
IngestStats.Stats totalStats = new IngestStats.Stats(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong());
IngestStats.Stats totalStats = new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong());
int numStatsPerPipeline = randomIntBetween(0, 10);
Map<String, IngestStats.Stats> statsPerPipeline = new HashMap<>();
for (int i = 0; i < numStatsPerPipeline; i++) {
statsPerPipeline.put(randomAsciiOfLengthBetween(3, 10), new IngestStats.Stats(randomPositiveLong(),
randomPositiveLong(), randomPositiveLong(), randomPositiveLong()));
statsPerPipeline.put(randomAsciiOfLengthBetween(3, 10), new IngestStats.Stats(randomNonNegativeLong(),
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
}
ingestStats = new IngestStats(totalStats, statsPerPipeline);
}
//TODO NodeIndicesStats are not tested here, way too complicated to create, also they need to be migrated to Writeable yet
return new NodeStats(node, randomPositiveLong(), null, osStats, processStats, jvmStats, threadPoolStats, fsInfo,
return new NodeStats(node, randomNonNegativeLong(), null, osStats, processStats, jvmStats, threadPoolStats, fsInfo,
transportStats, httpStats, allCircuitBreakerStats, scriptStats, discoveryStats, ingestStats);
}
}

@ -292,7 +292,7 @@ public class UpdateRequestTests extends ESTestCase {
.upsert(indexRequest)
.script(new Script(ScriptType.INLINE, "mock", "ctx._source.update_timestamp = ctx._now", Collections.emptyMap()))
.scriptedUpsert(true);
long nowInMillis = randomPositiveLong();
long nowInMillis = randomNonNegativeLong();
// We simulate that the document is not existing yet
GetResult getResult = new GetResult("test", "type1", "2", 0, false, null, null);
UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis);

@ -95,7 +95,7 @@ public class FieldMemoryStatsTests extends ESTestCase {
ObjectLongHashMap<String> map = new ObjectLongHashMap<>();
int keys = randomIntBetween(1, 1000);
for (int i = 0; i < keys; i++) {
map.put(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomPositiveLong());
map.put(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), randomNonNegativeLong());
}
return new FieldMemoryStats(map);
}

@ -150,7 +150,7 @@ public class ByteSizeValueTests extends ESTestCase {
}
public void testCompareEquality() {
long firstRandom = randomPositiveLong();
long firstRandom = randomNonNegativeLong();
ByteSizeUnit randomUnit = randomFrom(ByteSizeUnit.values());
ByteSizeValue firstByteValue = new ByteSizeValue(firstRandom, randomUnit);
ByteSizeValue secondByteValue = new ByteSizeValue(firstRandom, randomUnit);
@ -158,8 +158,8 @@ public class ByteSizeValueTests extends ESTestCase {
}
public void testCompareValue() {
long firstRandom = randomPositiveLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomPositiveLong);
long firstRandom = randomNonNegativeLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomNonNegativeLong);
ByteSizeUnit unit = randomFrom(ByteSizeUnit.values());
ByteSizeValue firstByteValue = new ByteSizeValue(firstRandom, unit);
ByteSizeValue secondByteValue = new ByteSizeValue(secondRandom, unit);
@ -168,7 +168,7 @@ public class ByteSizeValueTests extends ESTestCase {
}
public void testCompareUnits() {
long number = randomPositiveLong();
long number = randomNonNegativeLong();
ByteSizeUnit randomUnit = randomValueOtherThan(ByteSizeUnit.PB, ()->randomFrom(ByteSizeUnit.values()));
ByteSizeValue firstByteValue = new ByteSizeValue(number, randomUnit);
ByteSizeValue secondByteValue = new ByteSizeValue(number, ByteSizeUnit.PB);
@ -189,7 +189,7 @@ public class ByteSizeValueTests extends ESTestCase {
}
public void testSerialization() throws IOException {
ByteSizeValue byteSizeValue = new ByteSizeValue(randomPositiveLong(), randomFrom(ByteSizeUnit.values()));
ByteSizeValue byteSizeValue = new ByteSizeValue(randomNonNegativeLong(), randomFrom(ByteSizeUnit.values()));
try (BytesStreamOutput out = new BytesStreamOutput()) {
byteSizeValue.writeTo(out);
try (StreamInput in = out.bytes().streamInput()) {

@ -66,7 +66,7 @@ public class SizeValueTests extends ESTestCase {
}
public void testCompareEquality() {
long randomValue = randomPositiveLong();
long randomValue = randomNonNegativeLong();
SizeUnit randomUnit = randomFrom(SizeUnit.values());
SizeValue firstValue = new SizeValue(randomValue, randomUnit);
SizeValue secondValue = new SizeValue(randomValue, randomUnit);
@ -74,8 +74,8 @@ public class SizeValueTests extends ESTestCase {
}
public void testCompareValue() {
long firstRandom = randomPositiveLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomPositiveLong);
long firstRandom = randomNonNegativeLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomNonNegativeLong);
SizeUnit unit = randomFrom(SizeUnit.values());
SizeValue firstSizeValue = new SizeValue(firstRandom, unit);
SizeValue secondSizeValue = new SizeValue(secondRandom, unit);
@ -84,7 +84,7 @@ public class SizeValueTests extends ESTestCase {
}
public void testCompareUnits() {
long number = randomPositiveLong();
long number = randomNonNegativeLong();
SizeUnit randomUnit = randomValueOtherThan(SizeUnit.PETA, ()->randomFrom(SizeUnit.values()));
SizeValue firstValue = new SizeValue(number, randomUnit);
SizeValue secondValue = new SizeValue(number, SizeUnit.PETA);

@ -226,7 +226,7 @@ public class TimeValueTests extends ESTestCase {
}
public void testCompareEquality() {
long randomLong = randomPositiveLong();
long randomLong = randomNonNegativeLong();
TimeUnit randomUnit = randomFrom(TimeUnit.values());
TimeValue firstValue = new TimeValue(randomLong, randomUnit);
TimeValue secondValue = new TimeValue(randomLong, randomUnit);
@ -234,8 +234,8 @@ public class TimeValueTests extends ESTestCase {
}
public void testCompareValue() {
long firstRandom = randomPositiveLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomPositiveLong);
long firstRandom = randomNonNegativeLong();
long secondRandom = randomValueOtherThan(firstRandom, ESTestCase::randomNonNegativeLong);
TimeUnit unit = randomFrom(TimeUnit.values());
TimeValue firstValue = new TimeValue(firstRandom, unit);
TimeValue secondValue = new TimeValue(secondRandom, unit);
@ -244,7 +244,7 @@ public class TimeValueTests extends ESTestCase {
}
public void testCompareUnits() {
long number = randomPositiveLong();
long number = randomNonNegativeLong();
TimeUnit randomUnit = randomValueOtherThan(TimeUnit.DAYS, ()->randomFrom(TimeUnit.values()));
TimeValue firstValue = new TimeValue(number, randomUnit);
TimeValue secondValue = new TimeValue(number, TimeUnit.DAYS);

@ -66,7 +66,8 @@ public class ElectMasterServiceTests extends ESTestCase {
roles.add(DiscoveryNode.Role.MASTER);
DiscoveryNode node = new DiscoveryNode("n_" + i, "n_" + i, buildNewFakeTransportAddress(), Collections.emptyMap(),
roles, Version.CURRENT);
candidates.add(new MasterCandidate(node, randomBoolean() ? MasterCandidate.UNRECOVERED_CLUSTER_VERSION : randomPositiveLong()));
candidates.add(
new MasterCandidate(node, randomBoolean() ? MasterCandidate.UNRECOVERED_CLUSTER_VERSION : randomNonNegativeLong()));
}
Collections.shuffle(candidates, random());

@ -168,8 +168,8 @@ public class UnicastZenPingTests extends ESTestCase {
NetworkHandle handleD = startServices(settingsMismatch, threadPool, "UZP_D", versionD, supplier);
closeables.push(handleD.transportService);
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build();
final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomPositiveLong()).build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build();
final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build();
Settings hostsSettings = Settings.builder()
.putArray("discovery.zen.ping.unicast.hosts",
@ -329,7 +329,7 @@ public class UnicastZenPingTests extends ESTestCase {
.put("cluster.name", "test")
.build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build();
final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
zenPingA.start(new PingContextProvider() {
@ -567,7 +567,7 @@ public class UnicastZenPingTests extends ESTestCase {
hostsSettingsBuilder.put("discovery.zen.ping.unicast.hosts", (String) null);
}
final Settings hostsSettings = hostsSettingsBuilder.build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build();
// connection to reuse
handleA.transportService.connectToNode(handleB.node);
@ -639,7 +639,7 @@ public class UnicastZenPingTests extends ESTestCase {
.put("cluster.name", "test")
.put("discovery.zen.ping.unicast.hosts", (String) null) // use nodes for simplicity
.build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build();
final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build();
final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER);
zenPingA.start(new PingContextProvider() {

@ -619,54 +619,54 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
switch (type) {
case 0:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Long(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Long(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Long(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(), randomLong(), randomLong());
return new FieldStats.Long(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), randomLong(), randomLong());
}
case 1:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Double(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Double(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Double(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(), randomDouble(), randomDouble());
return new FieldStats.Double(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), randomDouble(), randomDouble());
}
case 2:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Date(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Date(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Date(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(), Joda.forPattern("basicDate"),
return new FieldStats.Date(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), Joda.forPattern("basicDate"),
new Date().getTime(), new Date().getTime());
}
case 3:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Text(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Text(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Text(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(),
return new FieldStats.Text(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
new BytesRef(randomAsciiOfLength(10)), new BytesRef(randomAsciiOfLength(20)));
}
case 4:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Ip(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Ip(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(),
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
InetAddress.getByName("::1"), InetAddress.getByName("::1"));
}
case 5:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Ip(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean());
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Ip(randomPositiveLong(), randomPositiveLong(), randomPositiveLong(),
randomPositiveLong(), randomBoolean(), randomBoolean(),
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
InetAddress.getByName("1.2.3.4"), InetAddress.getByName("1.2.3.4"));
}
default:

@ -157,6 +157,7 @@ import java.util.function.LongSupplier;
import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.max;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
@ -3123,6 +3124,50 @@ public class InternalEngineTests extends ESTestCase {
}
}
/*
* This test tests that a no-op does not generate a new sequence number, that no-ops can advance the local checkpoint, and that no-ops
* are correctly added to the translog.
*/
public void testNoOps() throws IOException {
engine.close();
InternalEngine noOpEngine = null;
final int maxSeqNo = randomIntBetween(0, 128);
final int localCheckpoint = randomIntBetween(0, maxSeqNo);
final int globalCheckpoint = randomIntBetween(0, localCheckpoint);
try {
final SequenceNumbersService seqNoService =
new SequenceNumbersService(shardId, defaultSettings, maxSeqNo, localCheckpoint, globalCheckpoint) {
@Override
public long generateSeqNo() {
throw new UnsupportedOperationException();
}
};
noOpEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, () -> seqNoService);
final long primaryTerm = randomNonNegativeLong();
final String reason = randomAsciiOfLength(16);
noOpEngine.noOp(
new Engine.NoOp(
null,
maxSeqNo + 1,
primaryTerm,
0,
VersionType.INTERNAL,
randomFrom(PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY),
System.nanoTime(),
reason));
assertThat(noOpEngine.seqNoService().getLocalCheckpoint(), equalTo((long) (maxSeqNo + 1)));
assertThat(noOpEngine.getTranslog().totalOperations(), equalTo(1));
final Translog.Operation op = noOpEngine.getTranslog().newSnapshot().next();
assertThat(op, instanceOf(Translog.NoOp.class));
final Translog.NoOp noOp = (Translog.NoOp) op;
assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 1)));
assertThat(noOp.primaryTerm(), equalTo(primaryTerm));
assertThat(noOp.reason(), equalTo(reason));
} finally {
IOUtils.close(noOpEngine);
}
}
/**
* Return a tuple representing the sequence ID for the given {@code Get}
* operation. The first value in the tuple is the sequence number, the

@ -30,7 +30,7 @@ public class FieldDataStatsTests extends ESTestCase {
public void testSerialize() throws IOException {
FieldMemoryStats map = randomBoolean() ? null : FieldMemoryStatsTests.randomFieldMemoryStats();
FieldDataStats stats = new FieldDataStats(randomPositiveLong(), randomPositiveLong(), map == null ? null :
FieldDataStats stats = new FieldDataStats(randomNonNegativeLong(), randomNonNegativeLong(), map == null ? null :
map);
BytesStreamOutput out = new BytesStreamOutput();
stats.writeTo(out);

@ -96,7 +96,7 @@ public class GetResultTests extends ESTestCase {
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomPositiveLong(),
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomNonNegativeLong(),
getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()));
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(),
getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields()));
@ -117,7 +117,7 @@ public class GetResultTests extends ESTestCase {
Map<String, GetField> fields = null;
Map<String, GetField> expectedFields = null;
if (frequently()) {
version = randomPositiveLong();
version = randomNonNegativeLong();
exists = true;
if (frequently()) {
source = RandomObjects.randomSource(random());

@ -67,7 +67,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
}
});
nowInMillis = randomPositiveLong();
nowInMillis = randomNonNegativeLong();
}
public void testIsFieldWithinQueryEmptyReader() throws IOException {

@ -48,7 +48,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
type = RandomPicks.randomFrom(random(), RangeType.values());
nowInMillis = randomPositiveLong();
nowInMillis = randomNonNegativeLong();
if (type == RangeType.DATE) {
addModifier(new Modifier("format", true) {
@Override

@ -46,7 +46,7 @@ public class QueryShardContextTests extends ESTestCase {
IndexSettings indexSettings = new IndexSettings(indexMetadata.build(), Settings.EMPTY);
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
final long nowInMillis = randomPositiveLong();
final long nowInMillis = randomNonNegativeLong();
QueryShardContext context = new QueryShardContext(
0, indexSettings, null, null, mapperService, null, null, xContentRegistry(), null, null,
() -> nowInMillis);

@ -36,10 +36,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import java.util.HashSet;
import static org.elasticsearch.mock.orig.Mockito.when;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
@ -103,7 +100,8 @@ public class GlobalCheckpointSyncActionTests extends ESTestCase {
if (randomBoolean()) {
action.shardOperationOnPrimary(primaryRequest, indexShard);
} else {
action.shardOperationOnReplica(new GlobalCheckpointSyncAction.ReplicaRequest(primaryRequest, randomPositiveLong()), indexShard);
action.shardOperationOnReplica(
new GlobalCheckpointSyncAction.ReplicaRequest(primaryRequest, randomNonNegativeLong()), indexShard);
}
verify(translog).sync();

@ -31,7 +31,7 @@ public class CompletionsStatsTests extends ESTestCase {
public void testSerialize() throws IOException {
FieldMemoryStats map = randomBoolean() ? null : FieldMemoryStatsTests.randomFieldMemoryStats();
CompletionStats stats = new CompletionStats(randomPositiveLong(), map == null ? null :
CompletionStats stats = new CompletionStats(randomNonNegativeLong(), map == null ? null :
map);
BytesStreamOutput out = new BytesStreamOutput();
stats.writeTo(out);

@ -31,7 +31,6 @@ import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -88,9 +87,11 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasToString;
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class TranslogTests extends ESTestCase {
@ -226,17 +227,28 @@ public class TranslogTests extends ESTestCase {
assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
assertThat(snapshot.totalOperations(), equalTo(ops.size()));
final long seqNo = randomNonNegativeLong();
final long primaryTerm = randomNonNegativeLong();
final String reason = randomAsciiOfLength(16);
addToTranslogAndList(translog, ops, new Translog.NoOp(seqNo, primaryTerm, reason));
snapshot = translog.newSnapshot();
Translog.Index index = (Translog.Index) snapshot.next();
assertThat(index != null, equalTo(true));
assertNotNull(index);
assertThat(BytesReference.toBytes(index.source()), equalTo(new byte[]{1}));
Translog.Delete delete = (Translog.Delete) snapshot.next();
assertThat(delete != null, equalTo(true));
assertNotNull(delete);
assertThat(delete.uid(), equalTo(newUid("2")));
assertThat(snapshot.next(), equalTo(null));
Translog.NoOp noOp = (Translog.NoOp) snapshot.next();
assertNotNull(noOp);
assertThat(noOp.seqNo(), equalTo(seqNo));
assertThat(noOp.primaryTerm(), equalTo(primaryTerm));
assertThat(noOp.reason(), equalTo(reason));
assertNull(snapshot.next());
long firstId = translog.currentFileGeneration();
translog.prepareCommit();
@ -268,68 +280,104 @@ public class TranslogTests extends ESTestCase {
public void testStats() throws IOException {
final long firstOperationPosition = translog.getFirstOperationPosition();
TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
long lastSize = stats.getTranslogSizeInBytes();
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
}
assertThat((int) firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC)));
assertThat(lastSize, equalTo(firstOperationPosition));
TranslogStats total = new TranslogStats();
translog.add(new Translog.Index("test", "1", new byte[]{1}));
stats = stats();
total.add(stats);
assertThat(stats.estimatedNumberOfOperations(), equalTo(1L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
lastSize = stats.getTranslogSizeInBytes();
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(1L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(97L));
}
translog.add(new Translog.Delete(newUid("2")));
stats = stats();
total.add(stats);
assertThat(stats.estimatedNumberOfOperations(), equalTo(2L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
lastSize = stats.getTranslogSizeInBytes();
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(2L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(125L));
}
translog.add(new Translog.Delete(newUid("3")));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(3L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(153L));
}
final long seqNo = 1;
final long primaryTerm = 1;
translog.add(new Translog.NoOp(seqNo, primaryTerm, randomAsciiOfLength(16)));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(195L));
}
final long expectedSizeInBytes = 238L;
translog.prepareCommit();
stats = stats();
total.add(stats);
assertThat(stats.estimatedNumberOfOperations(), equalTo(3L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4L));
assertThat(
stats.getTranslogSizeInBytes(),
equalTo(expectedSizeInBytes));
}
{
final TranslogStats stats = stats();
final BytesStreamOutput out = new BytesStreamOutput();
stats.writeTo(out);
final TranslogStats copy = new TranslogStats();
copy.readFrom(out.bytes().streamInput());
assertThat(copy.estimatedNumberOfOperations(), equalTo(4L));
assertThat(copy.getTranslogSizeInBytes(), equalTo(expectedSizeInBytes));
try (final XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
copy.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
assertThat(builder.string(), equalTo("{\"translog\":{\"operations\":4,\"size_in_bytes\":" + expectedSizeInBytes + "}}"));
}
}
translog.commit();
stats = stats();
total.add(stats);
assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(firstOperationPosition));
assertEquals(6, total.estimatedNumberOfOperations());
assertEquals(419, total.getTranslogSizeInBytes());
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(firstOperationPosition));
}
}
BytesStreamOutput out = new BytesStreamOutput();
total.writeTo(out);
TranslogStats copy = new TranslogStats();
copy.readFrom(out.bytes().streamInput());
assertEquals(6, copy.estimatedNumberOfOperations());
assertEquals(419, copy.getTranslogSizeInBytes());
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
copy.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":419}}", builder.string());
public void testTotalTests() {
final TranslogStats total = new TranslogStats();
final int n = randomIntBetween(0, 16);
final List<TranslogStats> statsList = new ArrayList<>(n);
for (int i = 0; i < n; i++) {
final TranslogStats stats = new TranslogStats(randomIntBetween(1, 4096), randomIntBetween(1, 1 << 20));
statsList.add(stats);
total.add(stats);
}
try {
new TranslogStats(1, -1);
fail("must be positive");
} catch (IllegalArgumentException ex) {
//all well
}
try {
new TranslogStats(-1, 1);
fail("must be positive");
} catch (IllegalArgumentException ex) {
//all well
}
assertThat(
total.estimatedNumberOfOperations(),
equalTo(statsList.stream().mapToLong(TranslogStats::estimatedNumberOfOperations).sum()));
assertThat(
total.getTranslogSizeInBytes(),
equalTo(statsList.stream().mapToLong(TranslogStats::getTranslogSizeInBytes).sum()));
}
public void testNegativeNumberOfOperations() {
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TranslogStats(-1, 1));
assertThat(e, hasToString(containsString("numberOfOperations must be >= 0")));
}
public void testNegativeSizeInBytes() {
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new TranslogStats(1, -1));
assertThat(e, hasToString(containsString("translogSizeInBytes must be >= 0")));
}
public void testSnapshot() throws IOException {
@ -465,8 +513,15 @@ public class TranslogTests extends ESTestCase {
assertEquals(expDelOp.version(), delOp.version());
assertEquals(expDelOp.versionType(), delOp.versionType());
break;
case NO_OP:
final Translog.NoOp noOp = (Translog.NoOp) op;
final Translog.NoOp expectedNoOp = (Translog.NoOp) expectedOp;
assertThat(noOp.seqNo(), equalTo(expectedNoOp.seqNo()));
assertThat(noOp.primaryTerm(), equalTo(expectedNoOp.primaryTerm()));
assertThat(noOp.reason(), equalTo(expectedNoOp.reason()));
break;
default:
throw new ElasticsearchException("unsupported opType");
throw new AssertionError("unsupported operation type [" + op.opType() + "]");
}
}
@ -607,7 +662,9 @@ public class TranslogTests extends ESTestCase {
while (run.get()) {
long id = idGenerator.incrementAndGet();
final Translog.Operation op;
switch (Translog.Operation.Type.values()[((int) (id % Translog.Operation.Type.values().length))]) {
final Translog.Operation.Type type =
Translog.Operation.Type.values()[((int) (id % Translog.Operation.Type.values().length))];
switch (type) {
case CREATE:
case INDEX:
op = new Translog.Index("type", "" + id, new byte[]{(byte) id});
@ -615,8 +672,11 @@ public class TranslogTests extends ESTestCase {
case DELETE:
op = new Translog.Delete(newUid("" + id));
break;
case NO_OP:
op = new Translog.NoOp(id, id, Long.toString(id));
break;
default:
throw new ElasticsearchException("unknown type");
throw new AssertionError("unsupported operation type [" + type + "]");
}
Translog.Location location = translog.add(op);
Translog.Location existing = writtenOps.put(op, location);
@ -1137,7 +1197,7 @@ public class TranslogTests extends ESTestCase {
try (Translog ignored = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) {
fail("corrupted");
} catch (IllegalStateException ex) {
assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2353, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}");
assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=3123, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}");
}
Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) {
@ -1293,7 +1353,8 @@ public class TranslogTests extends ESTestCase {
downLatch.await();
for (int opCount = 0; opCount < opsPerThread; opCount++) {
Translog.Operation op;
switch (randomFrom(Translog.Operation.Type.values())) {
final Translog.Operation.Type type = randomFrom(Translog.Operation.Type.values());
switch (type) {
case CREATE:
case INDEX:
op = new Translog.Index("test", threadId + "_" + opCount,
@ -1307,8 +1368,11 @@ public class TranslogTests extends ESTestCase {
1 + randomInt(100000),
randomFrom(VersionType.values()));
break;
case NO_OP:
op = new Translog.NoOp(randomNonNegativeLong(), randomNonNegativeLong(), randomAsciiOfLength(16));
break;
default:
throw new ElasticsearchException("not supported op type");
throw new AssertionError("unsupported operation type [" + type + "]");
}
Translog.Location loc = add(op);

@ -41,7 +41,7 @@ public class OsProbeTests extends ESTestCase {
public void testOsInfo() {
int allocatedProcessors = randomIntBetween(1, Runtime.getRuntime().availableProcessors());
long refreshInterval = randomBoolean() ? -1 : randomPositiveLong();
long refreshInterval = randomBoolean() ? -1 : randomNonNegativeLong();
OsInfo info = probe.osInfo(refreshInterval, allocatedProcessors);
assertNotNull(info);
assertEquals(refreshInterval, info.getRefreshInterval());

View File

@ -38,11 +38,11 @@ public class OsStatsTests extends ESTestCase {
OsStats.Swap swap = new OsStats.Swap(randomLong(), randomLong());
OsStats.Cgroup cgroup = new OsStats.Cgroup(
randomAsciiOfLength(8),
randomPositiveLong(),
randomNonNegativeLong(),
randomAsciiOfLength(8),
randomPositiveLong(),
randomPositiveLong(),
new OsStats.Cgroup.CpuStat(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()));
randomNonNegativeLong(),
randomNonNegativeLong(),
new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()));
OsStats osStats = new OsStats(System.currentTimeMillis(), cpu, mem, swap, cgroup);
try (BytesStreamOutput out = new BytesStreamOutput()) {

View File

@ -36,7 +36,7 @@ public class ProcessProbeTests extends ESTestCase {
private final ProcessProbe probe = ProcessProbe.getInstance();
public void testProcessInfo() {
long refreshInterval = randomPositiveLong();
long refreshInterval = randomNonNegativeLong();
ProcessInfo info = probe.processInfo(refreshInterval);
assertNotNull(info);
assertEquals(refreshInterval, info.getRefreshInterval());

View File

@ -112,13 +112,13 @@ public class NodeInfoStreamingTests extends ESTestCase {
if (randomBoolean()) {
int availableProcessors = randomIntBetween(1, 64);
int allocatedProcessors = randomIntBetween(1, availableProcessors);
long refreshInterval = randomBoolean() ? -1 : randomPositiveLong();
long refreshInterval = randomBoolean() ? -1 : randomNonNegativeLong();
String name = randomAsciiOfLengthBetween(3, 10);
String arch = randomAsciiOfLengthBetween(3, 10);
String version = randomAsciiOfLengthBetween(3, 10);
osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version);
}
ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomPositiveLong());
ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomNonNegativeLong());
JvmInfo jvm = randomBoolean() ? null : JvmInfo.jvmInfo();
ThreadPoolInfo threadPoolInfo = null;
if (randomBoolean()) {

View File

@ -95,7 +95,7 @@ public class SearchRequestTests extends AbstractSearchTestCase {
mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAsciiOfLengthBetween(3, 10))));
mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), () -> randomBoolean()))));
mutators.add(() -> mutation
.scroll(randomValueOtherThan(searchRequest.scroll(), () -> new Scroll(new TimeValue(randomPositiveLong() % 100000)))));
.scroll(randomValueOtherThan(searchRequest.scroll(), () -> new Scroll(new TimeValue(randomNonNegativeLong() % 100000)))));
mutators.add(() -> mutation.searchType(randomValueOtherThan(searchRequest.searchType(), () -> randomFrom(SearchType.values()))));
mutators.add(() -> mutation.source(randomValueOtherThan(searchRequest.source(), this::createSearchSourceBuilder)));
randomFrom(mutators).run();

View File

@ -198,7 +198,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
.putAlias(AliasMetaData.newAliasMetaDataBuilder("UjLlLkjwWh").filter("{\"term\" : {\"foo\" : \"bar1\"}}"))
.putAlias(AliasMetaData.newAliasMetaDataBuilder("uBpgtwuqDG").filter("{\"term\" : {\"foo\" : \"bar2\"}}"));
IndexSettings indexSettings = new IndexSettings(indexMetadata.build(), Settings.EMPTY);
final long nowInMillis = randomPositiveLong();
final long nowInMillis = randomNonNegativeLong();
QueryShardContext context = new QueryShardContext(
0, indexSettings, null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis);
readRequest.rewrite(context);

View File

@ -133,7 +133,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
* than the test builder
*/
public void testBuildRescoreSearchContext() throws ElasticsearchParseException, IOException {
final long nowInMillis = randomPositiveLong();
final long nowInMillis = randomNonNegativeLong();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings);

View File

@ -208,7 +208,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
public void onCache(ShardId shardId, Accountable accountable) {
}
});
long nowInMillis = randomPositiveLong();
long nowInMillis = randomNonNegativeLong();
return new QueryShardContext(0, idxSettings, bitsetFilterCache, ifds, null, null, scriptService,
xContentRegistry(), null, null, () -> nowInMillis) {
@Override

View File

@ -217,7 +217,7 @@ public class RoundTripTests extends ESTestCase {
}
public void testReindexResponse() throws IOException {
BulkIndexByScrollResponse response = new BulkIndexByScrollResponse(timeValueMillis(randomPositiveLong()), randomStatus(),
BulkIndexByScrollResponse response = new BulkIndexByScrollResponse(timeValueMillis(randomNonNegativeLong()), randomStatus(),
randomIndexingFailures(), randomSearchFailures(), randomBoolean());
BulkIndexByScrollResponse tripped = new BulkIndexByScrollResponse();
roundTrip(response, tripped);
@ -225,7 +225,7 @@ public class RoundTripTests extends ESTestCase {
}
public void testBulkIndexByScrollResponse() throws IOException {
BulkIndexByScrollResponse response = new BulkIndexByScrollResponse(timeValueMillis(randomPositiveLong()), randomStatus(),
BulkIndexByScrollResponse response = new BulkIndexByScrollResponse(timeValueMillis(randomNonNegativeLong()), randomStatus(),
randomIndexingFailures(), randomSearchFailures(), randomBoolean());
BulkIndexByScrollResponse tripped = new BulkIndexByScrollResponse();
roundTrip(response, tripped);

View File

@ -60,7 +60,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb.gz")),
geoIpConfigDir.resolve("GeoLite2-Country.mmdb.gz"));
NodeCache cache = randomFrom(NoCache.getInstance(), new GeoIpCache(randomPositiveLong()));
NodeCache cache = randomFrom(NoCache.getInstance(), new GeoIpCache(randomNonNegativeLong()));
databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir, cache);
}

View File

@ -1023,7 +1023,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private final BitsetFilterCache bitsetFilterCache;
private final ScriptService scriptService;
private final Client client;
private final long nowInMillis = randomPositiveLong();
private final long nowInMillis = randomNonNegativeLong();
ServiceHolder(Settings nodeSettings, Settings indexSettings,
Collection<Class<? extends Plugin>> plugins, AbstractQueryTestCase<?> testCase) throws IOException {

View File

@ -441,7 +441,7 @@ public abstract class ESTestCase extends LuceneTestCase {
return random().nextInt();
}
public static long randomPositiveLong() {
public static long randomNonNegativeLong() {
long randomLong;
do {
randomLong = randomLong();
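The hunk above is cut off by the diff context right after the do-loop begins, so only the rename from randomPositiveLong() to randomNonNegativeLong() is visible. Below is a standalone approximation of how such a helper is typically completed; the Long.MIN_VALUE guard and the final Math.abs are assumptions, not lines from this commit. The rename itself presumably reflects that the helper can return zero, making "non-negative" the more accurate name.

import java.util.concurrent.ThreadLocalRandom;

final class RandomLongSketch {

    /**
     * Standalone approximation (assumed, not from this diff): resample when the
     * draw is Long.MIN_VALUE, whose absolute value overflows back to a negative
     * number, then return the absolute value. The result can be zero, hence
     * "non-negative" rather than "positive".
     */
    static long randomNonNegativeLong() {
        long randomLong;
        do {
            randomLong = ThreadLocalRandom.current().nextLong();
        } while (randomLong == Long.MIN_VALUE);
        return Math.abs(randomLong);
    }
}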

View File

@ -33,7 +33,7 @@ import org.elasticsearch.test.TestSearchContext;
public class MockSearchServiceTests extends ESTestCase {
public void testAssertNoInFlightContext() {
final long nowInMillis = randomPositiveLong();
final long nowInMillis = randomNonNegativeLong();
SearchContext s = new TestSearchContext(new QueryShardContext(0, new IndexSettings(IndexMetaData.PROTO, Settings.EMPTY), null, null,
null, null, null, xContentRegistry(), null, null, () -> nowInMillis)) {
@Override