diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index d16383cb87a..7c5803e19c3 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -1084,13 +1084,6 @@ - - - - - - - diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 9335c4ac403..0b64f3afa7d 100644 --- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -40,16 +40,21 @@ import java.util.Locale; */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContent { - public enum Operation implements Writeable { - CREATE(0), - INDEX(1), - DELETE(2), - NOOP(3); + /** + * An enum that represents the results of CRUD operations, primarily used to communicate the type of + * operation that occurred. + */ + public enum Result implements Writeable { + CREATED(0), + UPDATED(1), + DELETED(2), + NOT_FOUND(3), + NOOP(4); private final byte op; private final String lowercase; - Operation(int op) { + Result(int op) { this.op = (byte) op; this.lowercase = this.toString().toLowerCase(Locale.ENGLISH); } @@ -62,19 +67,21 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr return lowercase; } - public static Operation readFrom(StreamInput in) throws IOException{ + public static Result readFrom(StreamInput in) throws IOException{ Byte opcode = in.readByte(); switch(opcode){ case 0: - return CREATE; + return CREATED; case 1: - return INDEX; + return UPDATED; case 2: - return DELETE; + return DELETED; case 3: + return NOT_FOUND; + case 4: return NOOP; default: - throw new IllegalArgumentException("Unknown operation code: " + opcode); + throw new IllegalArgumentException("Unknown result code: " + opcode); } } @@ -89,14 +96,14 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr private String type; private long version; private boolean forcedRefresh; - protected Operation operation; + protected Result result; - public DocWriteResponse(ShardId shardId, String type, String id, long version, Operation operation) { + public DocWriteResponse(ShardId shardId, String type, String id, long version, Result result) { this.shardId = shardId; this.type = type; this.id = id; this.version = version; - this.operation = operation; + this.result = result; } // needed for deserialization @@ -106,8 +113,8 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr /** * The change that occurred to the document. 
*/ - public Operation getOperation() { - return operation; + public Result getResult() { + return result; } /** @@ -198,7 +205,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr id = in.readString(); version = in.readZLong(); forcedRefresh = in.readBoolean(); - operation = Operation.readFrom(in); + result = Result.readFrom(in); } @Override @@ -209,7 +216,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr out.writeString(id); out.writeZLong(version); out.writeBoolean(forcedRefresh); - operation.writeTo(out); + result.writeTo(out); } @Override @@ -219,7 +226,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr .field("_type", type) .field("_id", id) .field("_version", version) - .field("_operation", getOperation().getLowercase()); + .field("result", getResult().getLowercase()); if (forcedRefresh) { builder.field("forced_refresh", forcedRefresh); } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index d20ee500cda..745449c0a7b 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -239,16 +239,16 @@ public class TransportShardBulkAction extends TransportWriteAction result = updateResult.writeResult; IndexRequest indexRequest = updateResult.request(); BytesReference indexSourceAsBytes = indexRequest.source(); // add the response IndexResponse indexResponse = result.getResponse(); - UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getOperation()); + UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getResult()); if (updateRequest.fields() != null && updateRequest.fields().length > 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); @@ -256,12 +256,12 @@ public class TransportShardBulkAction extends TransportWriteAction writeResult = updateResult.writeResult; DeleteResponse response = writeResult.getResponse(); DeleteRequest deleteRequest = updateResult.request(); - updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); + updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult()); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null)); // Replace the update request to the translated delete request to execute on the replica. 
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest); @@ -271,6 +271,8 @@ public class TransportShardBulkAction extends TransportWriteAction result = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard); @@ -432,7 +436,7 @@ public class TransportShardBulkAction extends TransportWriteAction() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult()); if (request.fields() != null && request.fields().length > 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); @@ -217,14 +216,14 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio } }); break; - case INDEX: + case UPDATED: IndexRequest indexRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference indexSourceBytes = indexRequest.source(); indexAction.execute(indexRequest, new ActionListener() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult()); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); @@ -248,12 +247,12 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio } }); break; - case DELETE: + case DELETED: DeleteRequest deleteRequest = result.action(); deleteAction.execute(deleteRequest, new ActionListener() { @Override public void onResponse(DeleteResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult()); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); @@ -289,7 +288,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio listener.onResponse(update); break; default: - throw new IllegalStateException("Illegal operation " + result.operation()); + throw new IllegalStateException("Illegal result " + result.getResponseResult()); } } } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java 
b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 209d95530c7..03600461599 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -117,9 +117,9 @@ public class UpdateHelper extends AbstractComponent { request.script.getScript()); } UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), - getResult.getVersion(), DocWriteResponse.Operation.NOOP); + getResult.getVersion(), DocWriteResponse.Result.NOOP); update.setGetResult(getResult); - return new Result(update, DocWriteResponse.Operation.NOOP, upsertDoc, XContentType.JSON); + return new Result(update, DocWriteResponse.Result.NOOP, upsertDoc, XContentType.JSON); } indexRequest.source((Map) ctx.get("_source")); } @@ -136,7 +136,7 @@ public class UpdateHelper extends AbstractComponent { // in all but the internal versioning mode, we want to create the new document using the given version. indexRequest.version(request.version()).versionType(request.versionType()); } - return new Result(indexRequest, DocWriteResponse.Operation.CREATE, null, null); + return new Result(indexRequest, DocWriteResponse.Result.CREATED, null, null); } long updateVersion = getResult.getVersion(); @@ -227,21 +227,21 @@ public class UpdateHelper extends AbstractComponent { .consistencyLevel(request.consistencyLevel()) .timestamp(timestamp).ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()); - return new Result(indexRequest, DocWriteResponse.Operation.INDEX, updatedSourceAsMap, updateSourceContentType); + return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .version(updateVersion).versionType(request.versionType()) .consistencyLevel(request.consistencyLevel()) .setRefreshPolicy(request.getRefreshPolicy()); - return new Result(deleteRequest, DocWriteResponse.Operation.DELETE, updatedSourceAsMap, updateSourceContentType); + return new Result(deleteRequest, DocWriteResponse.Result.DELETED, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { - UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Operation.NOOP); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); - return new Result(update, DocWriteResponse.Operation.NOOP, updatedSourceAsMap, updateSourceContentType); + return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } else { logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); - UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Operation.NOOP); - return new Result(update, DocWriteResponse.Operation.NOOP, updatedSourceAsMap, updateSourceContentType); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); + return new 
Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } } @@ -310,13 +310,13 @@ public class UpdateHelper extends AbstractComponent { public static class Result { private final Streamable action; - private final DocWriteResponse.Operation operation; + private final DocWriteResponse.Result result; private final Map updatedSourceAsMap; private final XContentType updateSourceContentType; - public Result(Streamable action, DocWriteResponse.Operation operation, Map updatedSourceAsMap, XContentType updateSourceContentType) { + public Result(Streamable action, DocWriteResponse.Result result, Map updatedSourceAsMap, XContentType updateSourceContentType) { this.action = action; - this.operation = operation; + this.result = result; this.updatedSourceAsMap = updatedSourceAsMap; this.updateSourceContentType = updateSourceContentType; } @@ -326,8 +326,8 @@ public class UpdateHelper extends AbstractComponent { return (T) action; } - public DocWriteResponse.Operation operation() { - return operation; + public DocWriteResponse.Result getResponseResult() { + return result; } public Map updatedSourceAsMap() { diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index 2183dfe4f90..8061174d091 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -40,13 +40,13 @@ public class UpdateResponse extends DocWriteResponse { * Constructor to be used when a update didn't translate in a write. * For example: update script with operation set to none */ - public UpdateResponse(ShardId shardId, String type, String id, long version, Operation operation) { - this(new ShardInfo(0, 0), shardId, type, id, version, operation); + public UpdateResponse(ShardId shardId, String type, String id, long version, Result result) { + this(new ShardInfo(0, 0), shardId, type, id, version, result); } public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id, - long version, Operation operation) { - super(shardId, type, id, version, operation); + long version, Result result) { + super(shardId, type, id, version, result); setShardInfo(shardInfo); } @@ -60,7 +60,7 @@ public class UpdateResponse extends DocWriteResponse { @Override public RestStatus status() { - return this.operation == Operation.CREATE ? RestStatus.CREATED : super.status(); + return this.result == Result.CREATED ? 
RestStatus.CREATED : super.status(); } @Override @@ -106,7 +106,7 @@ public class UpdateResponse extends DocWriteResponse { builder.append(",type=").append(getType()); builder.append(",id=").append(getId()); builder.append(",version=").append(getVersion()); - builder.append(",operation=").append(getOperation().getLowercase()); + builder.append(",result=").append(getResult().getLowercase()); builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index f0a0fdec665..70880373530 100644 --- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; import java.io.IOException; @@ -70,12 +71,12 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus private final boolean includeGlobalState; private final boolean partial; private final ImmutableOpenMap shards; - private final List indices; + private final List indices; private final ImmutableOpenMap> waitingIndices; private final long startTime; - public Entry(Snapshot snapshot, boolean includeGlobalState, boolean partial, State state, List indices, long startTime, - ImmutableOpenMap shards) { + public Entry(Snapshot snapshot, boolean includeGlobalState, boolean partial, State state, List indices, + long startTime, ImmutableOpenMap shards) { this.state = state; this.snapshot = snapshot; this.includeGlobalState = includeGlobalState; @@ -111,7 +112,7 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus return state; } - public List indices() { + public List indices() { return indices; } @@ -377,9 +378,9 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus boolean partial = in.readBoolean(); State state = State.fromValue(in.readByte()); int indices = in.readVInt(); - List indexBuilder = new ArrayList<>(); + List indexBuilder = new ArrayList<>(); for (int j = 0; j < indices; j++) { - indexBuilder.add(in.readString()); + indexBuilder.add(new IndexId(in.readString(), in.readString())); } long startTime = in.readLong(); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); @@ -410,8 +411,8 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus out.writeBoolean(entry.partial()); out.writeByte(entry.state().value()); out.writeVInt(entry.indices().size()); - for (String index : entry.indices()) { - out.writeString(index); + for (IndexId index : entry.indices()) { + index.writeTo(out); } out.writeLong(entry.startTime()); out.writeVInt(entry.shards().size()); @@ -458,8 +459,8 @@ public class SnapshotsInProgress extends AbstractDiffable implements Cus builder.field(STATE, entry.state()); builder.startArray(INDICES); { - for (String index : entry.indices()) { - builder.value(index); + for (IndexId index : entry.indices()) { + index.toXContent(builder, params); } } builder.endArray(); diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index 024a882a7de..4229ee954d4 
100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; import java.io.InputStream; -import java.util.Collection; +import java.nio.file.NoSuchFileException; import java.util.Map; /** @@ -53,7 +53,8 @@ public interface BlobContainer { * @param blobName * The name of the blob to get an {@link InputStream} for. * @return The {@code InputStream} to read the blob. - * @throws IOException if the blob does not exist or can not be read. + * @throws NoSuchFileException if the blob does not exist + * @throws IOException if the blob can not be read. */ InputStream readBlob(String blobName) throws IOException; @@ -95,7 +96,8 @@ public interface BlobContainer { * * @param blobName * The name of the blob to delete. - * @throws IOException if the blob does not exist, or if the blob exists but could not be deleted. + * @throws NoSuchFileException if the blob does not exist + * @throws IOException if the blob exists but could not be deleted. */ void deleteBlob(String blobName) throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 822f8d1721a..02a5aa357df 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -27,13 +27,16 @@ import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.io.Streams; import java.io.BufferedInputStream; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.DirectoryStream; import java.nio.file.Files; +import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.nio.file.StandardCopyOption; +import java.nio.file.StandardOpenOption; import java.nio.file.attribute.BasicFileAttributes; import java.util.HashMap; import java.util.Map; @@ -85,7 +88,7 @@ public class FsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { Path blobPath = path.resolve(blobName); - Files.deleteIfExists(blobPath); + Files.delete(blobPath); } @Override @@ -95,14 +98,18 @@ public class FsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String name) throws IOException { - return new BufferedInputStream(Files.newInputStream(path.resolve(name)), blobStore.bufferSizeInBytes()); + final Path resolvedPath = path.resolve(name); + try { + return new BufferedInputStream(Files.newInputStream(resolvedPath), blobStore.bufferSizeInBytes()); + } catch (FileNotFoundException fnfe) { + throw new NoSuchFileException("[" + name + "] blob not found"); + } } @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final Path file = path.resolve(blobName); - // TODO: why is this not specifying CREATE_NEW? Do we really need to be able to truncate existing files? 
- try (OutputStream outputStream = Files.newOutputStream(file)) { + try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) { Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]); } IOUtils.fsync(file, false); diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java index 60be21127bf..1c4652c9f10 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java @@ -20,14 +20,11 @@ package org.elasticsearch.common.blobstore.support; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; import java.io.InputStream; -import java.util.Collection; -import java.util.Map; /** * A base abstract blob container that implements higher level container methods. diff --git a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java index 2e5ee110d60..4399ba6a8fe 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java +++ b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java @@ -30,11 +30,14 @@ import java.util.Set; /** * A utility class for multi threaded operation that needs to be cancellable via interrupts. Every cancellable operation should be * executed via {@link #execute(Interruptable)}, which will capture the executing thread and make sure it is interrupted in the case - * cancellation. + * of cancellation. + * + * Cancellation policy: This class does not support external interruption via Thread#interrupt(). Always use #cancel() instead. */ public class CancellableThreads { private final Set threads = new HashSet<>(); - private boolean cancelled = false; + // needs to be volatile as it is also read outside of synchronized blocks. + private volatile boolean cancelled = false; private String reason; public synchronized boolean isCancelled() { @@ -94,13 +97,18 @@ public class CancellableThreads { */ public void executeIO(IOInterruptable interruptable) throws IOException { boolean wasInterrupted = add(); + boolean cancelledByExternalInterrupt = false; RuntimeException runtimeException = null; IOException ioException = null; try { interruptable.run(); } catch (InterruptedException | ThreadInterruptedException e) { - // assume this is us and ignore + // ignore, this interrupt has been triggered by us in #cancel()... + assert cancelled : "Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead"; + // we can only reach here if assertions are disabled. If we reach this code and cancelled is false, this means that we've + // been interrupted externally (which we don't support). + cancelledByExternalInterrupt = !cancelled; } catch (RuntimeException t) { runtimeException = t; } catch (IOException e) { @@ -128,6 +136,12 @@ public class CancellableThreads { throw runtimeException; } } + if (cancelledByExternalInterrupt) { + // restore interrupt flag to at least adhere to expected behavior + Thread.currentThread().interrupt(); + throw new RuntimeException("Interruption via Thread#interrupt() is unsupported. 
Use CancellableThreads#cancel() instead"); + } + } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index c86e80e289b..ff5cdd4e31e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -281,16 +281,16 @@ public class NodeJoinController extends AbstractComponent { Map tasks = getPendingAsTasks(); final String source = "zen-disco-elected-as-master ([" + tasks.size() + "] nodes joined)"; - tasks.put(BECOME_MASTER_TASK, joinProcessedListener); + tasks.put(BECOME_MASTER_TASK, (source1, e) -> {}); // noop listener, the election finished listener determines result + tasks.put(FINISH_ELECTION_TASK, electionFinishedListener); clusterService.submitStateUpdateTasks(source, tasks, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor); } public synchronized void closeAndProcessPending(String reason) { innerClose(); Map tasks = getPendingAsTasks(); - final String source = "zen-disco-process-pending-joins [" + reason + "]"; - - tasks.put(FINISH_ELECTION_NOT_MASTER_TASK, joinProcessedListener); + final String source = "zen-disco-election-stop [" + reason + "]"; + tasks.put(FINISH_ELECTION_TASK, electionFinishedListener); clusterService.submitStateUpdateTasks(source, tasks, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor); } @@ -327,12 +327,15 @@ public class NodeJoinController extends AbstractComponent { } } - private final ClusterStateTaskListener joinProcessedListener = new ClusterStateTaskListener() { + private final ClusterStateTaskListener electionFinishedListener = new ClusterStateTaskListener() { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - assert newState.nodes().isLocalNodeElectedMaster() : "should have become a master but isn't " + newState.prettyPrint(); - onElectedAsMaster(newState); + if (newState.nodes().isLocalNodeElectedMaster()) { + ElectionContext.this.onElectedAsMaster(newState); + } else { + onFailure(source, new NotMasterException("election stopped [" + source + "]")); + } } @Override @@ -379,7 +382,9 @@ public class NodeJoinController extends AbstractComponent { } } - // a task indicated that the current node should become master, if no current master is known + /** + * a task indicated that the current node should become master, if no current master is known + */ private static final DiscoveryNode BECOME_MASTER_TASK = new DiscoveryNode("_BECOME_MASTER_TASK_", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.emptySet(), Version.CURRENT) { @Override @@ -388,9 +393,11 @@ public class NodeJoinController extends AbstractComponent { } }; - // a task that is used to process pending joins without explicitly becoming a master and listening to the results - // this task is used when election is stop without the local node becoming a master per se (though it might - private static final DiscoveryNode FINISH_ELECTION_NOT_MASTER_TASK = new DiscoveryNode("_NOT_MASTER_TASK_", + /** + * a task that is used to signal the election is stopped and we should process pending joins. 
+ * it may be used in combination with {@link #BECOME_MASTER_TASK} + */ + private static final DiscoveryNode FINISH_ELECTION_TASK = new DiscoveryNode("_FINISH_ELECTION_", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.emptySet(), Version.CURRENT) { @Override public String toString() { @@ -402,31 +409,35 @@ @Override public BatchResult execute(ClusterState currentState, List joiningNodes) throws Exception { - final DiscoveryNodes currentNodes = currentState.nodes(); final BatchResult.Builder results = BatchResult.builder(); + + final DiscoveryNodes currentNodes = currentState.nodes(); boolean nodesChanged = false; ClusterState.Builder newState = ClusterState.builder(currentState); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes); - if (currentNodes.getMasterNode() == null && joiningNodes.contains(BECOME_MASTER_TASK)) { + if (joiningNodes.size() == 1 && joiningNodes.get(0).equals(FINISH_ELECTION_TASK)) { + return results.successes(joiningNodes).build(currentState); + } else if (currentNodes.getMasterNode() == null && joiningNodes.contains(BECOME_MASTER_TASK)) { + assert joiningNodes.contains(FINISH_ELECTION_TASK) : "becoming a master but election is not finished " + joiningNodes; // use these joins to try and become the master. // Note that we don't have to do any validation of the amount of joining nodes - the commit // during the cluster state publishing guarantees that we have enough - nodesBuilder.masterNodeId(currentNodes.getLocalNodeId()); ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks()) .removeGlobalBlock(discoverySettings.getNoMasterBlock()).build(); newState.blocks(clusterBlocks); nodesChanged = true; - } - - if (nodesBuilder.isLocalNodeElectedMaster() == false) { + } else if (nodesBuilder.isLocalNodeElectedMaster() == false) { logger.trace("processing node joins, but we are not the master. 
current master: {}", currentNodes.getMasterNode()); throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request"); } + assert nodesBuilder.isLocalNodeElectedMaster(); + + // processing any joins for (final DiscoveryNode node : joiningNodes) { - if (node.equals(BECOME_MASTER_TASK) || node.equals(FINISH_ELECTION_NOT_MASTER_TASK)) { + if (node.equals(BECOME_MASTER_TASK) || node.equals(FINISH_ELECTION_TASK)) { // noop } else if (currentNodes.nodeExists(node)) { logger.debug("received a join request for an existing node [{}]", node); diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index ff2d1d298a5..e08130d9d8d 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; import java.io.IOException; @@ -394,10 +395,12 @@ final class StoreRecovery { translogState.totalOperationsOnStart(0); indexShard.prepareForIndexRecovery(); ShardId snapshotShardId = shardId; - if (!shardId.getIndexName().equals(restoreSource.index())) { - snapshotShardId = new ShardId(restoreSource.index(), IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id()); + final String indexName = restoreSource.index(); + if (!shardId.getIndexName().equals(indexName)) { + snapshotShardId = new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id()); } - repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), snapshotShardId, indexShard.recoveryState()); + final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName); + repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState()); indexShard.skipTranslogRecovery(); indexShard.finalizeRecovery(); indexShard.postRecovery("restore done"); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index 581e8d6a903..41a430d0c82 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -113,8 +113,7 @@ public class TranslogReader extends BaseTranslogReader implements Closeable { headerStream.read(ref.bytes, ref.offset, ref.length); BytesRef uuidBytes = new BytesRef(translogUUID); if (uuidBytes.bytesEquals(ref) == false) { - throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + "/" + uuidBytes.utf8ToString() + - " but got: " + ref + "/" + ref.utf8ToString() + + throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + " but got: " + ref + " this translog file belongs to a different translog. 
path:" + path); } return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps); diff --git a/core/src/main/java/org/elasticsearch/repositories/IndexId.java b/core/src/main/java/org/elasticsearch/repositories/IndexId.java new file mode 100644 index 00000000000..434582e61ed --- /dev/null +++ b/core/src/main/java/org/elasticsearch/repositories/IndexId.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; + +import java.io.IOException; +import java.util.Objects; + +/** + * Represents a single snapshotted index in the repository. + */ +public final class IndexId implements Writeable, ToXContent { + protected static final String NAME = "name"; + protected static final String ID = "id"; + + private final String name; + private final String id; + + public IndexId(final String name, final String id) { + this.name = name; + this.id = id; + } + + public IndexId(final StreamInput in) throws IOException { + this.name = in.readString(); + this.id = in.readString(); + } + + /** + * The name of the index. + */ + public String getName() { + return name; + } + + /** + * The unique ID for the index within the repository. This is *not* the same as the + * index's UUID, but merely a unique file/URL friendly identifier that a repository can + * use to name blobs for the index. + * + * We could not use the index's actual UUID (See {@link Index#getUUID()}) because in the + * case of snapshot/restore, the index UUID in the snapshotted index will be different + * from the index UUID assigned to it when it is restored. Hence, the actual index UUID + * is not useful in the context of snapshot/restore for tying a snapshotted index to the + * index it was snapshotted from, and so we are using a separate UUID here. 
+ */ + public String getId() { + return id; + } + + @Override + public String toString() { + return "[" + name + "/" + id + "]"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + @SuppressWarnings("unchecked") IndexId that = (IndexId) o; + return Objects.equals(name, that.name) && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(name, id); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(id); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(NAME, name); + builder.field(ID, id); + builder.endObject(); + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/repositories/Repository.java b/core/src/main/java/org/elasticsearch/repositories/Repository.java index 11a060d73e8..544f757737c 100644 --- a/core/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/core/src/main/java/org/elasticsearch/repositories/Repository.java @@ -47,7 +47,7 @@ import java.util.List; *
* <ul>
* <li>Master calls {@link #initializeSnapshot(SnapshotId, List, org.elasticsearch.cluster.metadata.MetaData)}
* with list of indices that will be included into the snapshot</li>
- * <li>Data nodes call {@link Repository#snapshotShard(IndexShard, SnapshotId, IndexCommit, IndexShardSnapshotStatus)}
+ * <li>Data nodes call {@link Repository#snapshotShard(IndexShard, SnapshotId, IndexId, IndexCommit, IndexShardSnapshotStatus)}
* for each shard</li>
* <li>When all shard calls return master calls {@link #finalizeSnapshot} with possible list of failures</li>
* </ul>
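The list above is the snapshot lifecycle that the hunks below migrate from plain index names to IndexId. A minimal sketch of a caller driving the new signatures — not part of the patch, and repository, snapshotId, clusterMetaData, shard, commit, status, startTime, and totalShards are all hypothetical locals:

    // Master: resolve repository-scoped ids up front; resolveNewIndices (added in
    // RepositoryData below) mints a fresh UUID-based IndexId for any index the
    // repository has not seen before.
    List<IndexId> indexIds = repository.getRepositoryData().resolveNewIndices(Arrays.asList("foo", "bar"));
    repository.initializeSnapshot(snapshotId, indexIds, clusterMetaData);
    // Data nodes: snapshot each shard, addressing its blobs by IndexId rather than by index name.
    for (IndexId indexId : indexIds) {
        repository.snapshotShard(shard, snapshotId, indexId, commit, status);
    }
    // Master: seal the snapshot once all shard calls have returned (null failure, no shard failures).
    SnapshotInfo info = repository.finalizeSnapshot(snapshotId, indexIds, startTime, null, totalShards, Collections.emptyList());
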
@@ -88,15 +88,14 @@ public interface Repository extends LifecycleComponent { * @param indices list of indices * @return information about snapshot */ - MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException; + MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException; /** - * Returns the list of snapshots currently stored in the repository that match the given predicate on the snapshot name. - * To get all snapshots, the predicate filter should return true regardless of the input. - * - * @return snapshot list + * Returns a {@link RepositoryData} to describe the data in the repository, including the snapshots + * and the indices across all snapshots found in the repository. Throws a {@link RepositoryException} + * if there was an error in reading the data. */ - List getSnapshots(); + RepositoryData getRepositoryData(); /** * Starts snapshotting process @@ -105,7 +104,7 @@ public interface Repository extends LifecycleComponent { * @param indices list of indices to be snapshotted * @param metaData cluster metadata */ - void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData); + void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData); /** * Finalizes snapshotting process @@ -113,12 +112,14 @@ public interface Repository extends LifecycleComponent { * This method is called on master after all shards are snapshotted. * * @param snapshotId snapshot id + * @param indices list of indices in the snapshot + * @param startTime start time of the snapshot * @param failure global failure reason or null * @param totalShards total number of shards * @param shardFailures list of shard failures * @return snapshot description */ - SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures); + SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures); /** * Deletes snapshot @@ -181,10 +182,11 @@ public interface Repository extends LifecycleComponent { * * @param shard shard to be snapshotted * @param snapshotId snapshot id + * @param indexId id for the index being snapshotted * @param snapshotIndexCommit commit point * @param snapshotStatus snapshot status */ - void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus); + void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus); /** * Restores snapshot of the shard. 
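The restore side is symmetric, as the StoreRecovery hunk earlier in the patch shows: resolve the repository-scoped id for the index name first, then address the shard with it. A hedged sketch along the same lines, with shard, snapshotId, version, snapshotShardId, and recoveryState as hypothetical locals:

    // Resolve the repository-internal IndexId for the index being restored. For
    // repositories written before this change, resolveIndexId (below) falls back to
    // an IndexId whose id equals the index name, matching the old blob layout.
    IndexId indexId = repository.getRepositoryData().resolveIndexId(shard.shardId().getIndexName());
    repository.restoreShard(shard, snapshotId, version, indexId, snapshotShardId, recoveryState);
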
@@ -194,20 +196,22 @@ public interface Repository extends LifecycleComponent { * @param shard the shard to restore the index into * @param snapshotId snapshot id * @param version version of elasticsearch that created this snapshot + * @param indexId id of the index in the repository from which the restore is occurring * @param snapshotShardId shard id (in the snapshot) * @param recoveryState recovery state */ - void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState); + void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState); /** * Retrieve shard snapshot status for the stored snapshot * * @param snapshotId snapshot id * @param version version of elasticsearch that created this snapshot + * @param indexId the snapshotted index id for the shard to get status for * @param shardId shard id * @return snapshot status */ - IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId); + IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId); } diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/core/src/main/java/org/elasticsearch/repositories/RepositoryData.java new file mode 100644 index 00000000000..4927e2b41b7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.snapshots.SnapshotId; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * A class that represents the data in a repository, as captured in the + * repository's index blob. + */ +public final class RepositoryData implements ToXContent { + + public static final RepositoryData EMPTY = new RepositoryData(Collections.emptyList(), Collections.emptyMap()); + + /** + * The ids of the snapshots in the repository. 
+ */ + private final List snapshotIds; + /** + * The indices found in the repository across all snapshots, as a name to {@link IndexId} mapping + */ + private final Map indices; + /** + * The snapshots that each index belongs to. + */ + private final Map> indexSnapshots; + + public RepositoryData(List snapshotIds, Map> indexSnapshots) { + this.snapshotIds = Collections.unmodifiableList(snapshotIds); + this.indices = Collections.unmodifiableMap(indexSnapshots.keySet() + .stream() + .collect(Collectors.toMap(IndexId::getName, Function.identity()))); + this.indexSnapshots = Collections.unmodifiableMap(indexSnapshots); + } + + protected RepositoryData copy() { + return new RepositoryData(snapshotIds, indexSnapshots); + } + + /** + * Returns an unmodifiable list of the snapshot ids. + */ + public List getSnapshotIds() { + return snapshotIds; + } + + /** + * Returns an unmodifiable map of the index names to {@link IndexId} in the repository. + */ + public Map getIndices() { + return indices; + } + + /** + * Add a snapshot and its indices to the repository; returns a new instance. If the snapshot + * already exists in the repository data, this method throws an IllegalArgumentException. + */ + public RepositoryData addSnapshot(final SnapshotId snapshotId, final List snapshottedIndices) { + if (snapshotIds.contains(snapshotId)) { + throw new IllegalArgumentException("[" + snapshotId + "] already exists in the repository data"); + } + List snapshots = new ArrayList<>(snapshotIds); + snapshots.add(snapshotId); + Map> allIndexSnapshots = new HashMap<>(indexSnapshots); + for (final IndexId indexId : snapshottedIndices) { + if (allIndexSnapshots.containsKey(indexId)) { + Set ids = allIndexSnapshots.get(indexId); + if (ids == null) { + ids = new LinkedHashSet<>(); + allIndexSnapshots.put(indexId, ids); + } + ids.add(snapshotId); + } else { + Set ids = new LinkedHashSet<>(); + ids.add(snapshotId); + allIndexSnapshots.put(indexId, ids); + } + } + return new RepositoryData(snapshots, allIndexSnapshots); + } + + /** + * Initializes the indices in the repository metadata; returns a new instance. + */ + public RepositoryData initIndices(final Map> indexSnapshots) { + return new RepositoryData(snapshotIds, indexSnapshots); + } + + /** + * Remove a snapshot and remove any indices that no longer exist in the repository due to the deletion of the snapshot. + */ + public RepositoryData removeSnapshot(final SnapshotId snapshotId) { + List newSnapshotIds = snapshotIds + .stream() + .filter(id -> snapshotId.equals(id) == false) + .collect(Collectors.toList()); + Map> indexSnapshots = new HashMap<>(); + for (final IndexId indexId : indices.values()) { + Set set; + Set snapshotIds = this.indexSnapshots.get(indexId); + assert snapshotIds != null; + if (snapshotIds.contains(snapshotId)) { + if (snapshotIds.size() == 1) { + // removing the snapshot will mean no more snapshots have this index, so just skip over it + continue; + } + set = new LinkedHashSet<>(snapshotIds); + set.remove(snapshotId); + } else { + set = snapshotIds; + } + indexSnapshots.put(indexId, set); + } + + return new RepositoryData(newSnapshotIds, indexSnapshots); + } + + /** + * Returns an immutable collection of the snapshot ids for the snapshots that contain the given index. 
+ */ + public Set getSnapshots(final IndexId indexId) { + Set snapshotIds = indexSnapshots.get(indexId); + if (snapshotIds == null) { + throw new IllegalArgumentException("unknown snapshot index " + indexId + ""); + } + return snapshotIds; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("unchecked") RepositoryData that = (RepositoryData) obj; + return snapshotIds.equals(that.snapshotIds) + && indices.equals(that.indices) + && indexSnapshots.equals(that.indexSnapshots); + } + + @Override + public int hashCode() { + return Objects.hash(snapshotIds, indices, indexSnapshots); + } + + /** + * Resolve the index name to the index id specific to the repository, + * throwing an exception if the index could not be resolved. + */ + public IndexId resolveIndexId(final String indexName) { + if (indices.containsKey(indexName)) { + return indices.get(indexName); + } else { + // on repositories created before 5.0, there was no indices information in the index + // blob, so if the repository hasn't been updated with new snapshots, no new index blob + // would have been written, so we only have old snapshots without the index information. + // in this case, the index id is just the index name + return new IndexId(indexName, indexName); + } + } + + /** + * Resolve the given index names to index ids. + */ + public List resolveIndices(final List indices) { + List resolvedIndices = new ArrayList<>(indices.size()); + for (final String indexName : indices) { + resolvedIndices.add(resolveIndexId(indexName)); + } + return resolvedIndices; + } + + /** + * Resolve the given index names to index ids, creating new index ids for + * new indices in the repository. 
+ */ + public List resolveNewIndices(final List indicesToResolve) { + List snapshotIndices = new ArrayList<>(); + for (String index : indicesToResolve) { + final IndexId indexId; + if (indices.containsKey(index)) { + indexId = indices.get(index); + } else { + indexId = new IndexId(index, UUIDs.randomBase64UUID()); + } + snapshotIndices.add(indexId); + } + return snapshotIndices; + } + + private static final String SNAPSHOTS = "snapshots"; + private static final String INDICES = "indices"; + private static final String INDEX_ID = "id"; + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + // write the snapshots list + builder.startArray(SNAPSHOTS); + for (final SnapshotId snapshot : getSnapshotIds()) { + snapshot.toXContent(builder, params); + } + builder.endArray(); + // write the indices map + builder.startObject(INDICES); + for (final IndexId indexId : getIndices().values()) { + builder.startObject(indexId.getName()); + builder.field(INDEX_ID, indexId.getId()); + builder.startArray(SNAPSHOTS); + Set snapshotIds = indexSnapshots.get(indexId); + assert snapshotIds != null; + for (final SnapshotId snapshotId : snapshotIds) { + snapshotId.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + } + builder.endObject(); + builder.endObject(); + return builder; + } + + public static RepositoryData fromXContent(final XContentParser parser) throws IOException { + List snapshots = new ArrayList<>(); + Map> indexSnapshots = new HashMap<>(); + if (parser.nextToken() == XContentParser.Token.START_OBJECT) { + while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + String currentFieldName = parser.currentName(); + if (SNAPSHOTS.equals(currentFieldName)) { + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + snapshots.add(SnapshotId.fromXContent(parser)); + } + } else { + throw new ElasticsearchParseException("expected array for [" + currentFieldName + "]"); + } + } else if (INDICES.equals(currentFieldName)) { + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("start object expected [indices]"); + } + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String indexName = parser.currentName(); + String indexId = null; + Set snapshotIds = new LinkedHashSet<>(); + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("start object expected index[" + indexName + "]"); + } + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String indexMetaFieldName = parser.currentName(); + parser.nextToken(); + if (INDEX_ID.equals(indexMetaFieldName)) { + indexId = parser.text(); + } else if (SNAPSHOTS.equals(indexMetaFieldName)) { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new ElasticsearchParseException("start array expected [snapshots]"); + } + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + snapshotIds.add(SnapshotId.fromXContent(parser)); + } + } + } + assert indexId != null; + indexSnapshots.put(new IndexId(indexName, indexId), snapshotIds); + } + } else { + throw new ElasticsearchParseException("unknown field name [" + currentFieldName + "]"); + } + } + } else { + throw new ElasticsearchParseException("start object expected"); + } + return new RepositoryData(snapshots, indexSnapshots); + } + +} diff --git 
a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 06e2b8ff97a..fe11a502c42 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -45,6 +45,8 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; import org.elasticsearch.index.snapshots.IndexShardSnapshotException; @@ -58,6 +60,8 @@ import org.elasticsearch.index.snapshots.blobstore.SnapshotFiles; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; @@ -103,6 +107,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -119,14 +124,14 @@ import static java.util.Collections.unmodifiableMap; * {@code * STORE_ROOT * |- index-N - list of all snapshot name as JSON array, N is the generation of the file - * |- index-latest - contains the numeric value of the latest generation of the index file (i.e. N from above) - * |- snapshot-20131010 - JSON serialized Snapshot for snapshot "20131010" + * |- index.latest - contains the numeric value of the latest generation of the index file (i.e. N from above) + * |- snap-20131010 - JSON serialized Snapshot for snapshot "20131010" * |- meta-20131010.dat - JSON serialized MetaData for snapshot "20131010" (includes only global metadata) - * |- snapshot-20131011 - JSON serialized Snapshot for snapshot "20131011" + * |- snap-20131011 - JSON serialized Snapshot for snapshot "20131011" * |- meta-20131011.dat - JSON serialized MetaData for snapshot "20131011" * ..... * |- indices/ - data for all indices - * |- foo/ - data for index "foo" + * |- Ac1342-B_x/ - data for index "foo" which was assigned the unique id of Ac1342-B_x in the repository * | |- meta-20131010.dat - JSON Serialized IndexMetaData for index "foo" * | |- 0/ - data for shard "0" of index "foo" * | | |- __1 \ @@ -146,7 +151,7 @@ import static java.util.Collections.unmodifiableMap; * | |-2/ * | ...... * | - * |- bar/ - data for index bar + * |- 1xB0D8_B3y/ - data for index "bar" which was assigned the unique id of 1xB0D8_B3y in the repository * ...... 
* } * @@ -163,13 +168,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp private static final String SNAPSHOT_PREFIX = "snap-"; - protected static final String SNAPSHOT_CODEC = "snapshot"; + private static final String SNAPSHOT_CODEC = "snapshot"; static final String SNAPSHOTS_FILE = "index"; // package private for unit testing - private static final String SNAPSHOTS_FILE_PREFIX = "index-"; + private static final String INDEX_FILE_PREFIX = "index-"; - private static final String SNAPSHOTS_INDEX_LATEST_BLOB = "index.latest"; + private static final String INDEX_LATEST_BLOB = "index.latest"; private static final String TESTS_FILE = "tests-"; @@ -305,7 +310,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } @Override - public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData clusterMetadata) { + public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetaData) { if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot create snapshot in a readonly repository"); } @@ -315,28 +320,69 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp if (getSnapshots().stream().anyMatch(s -> s.getName().equals(snapshotName))) { throw new SnapshotCreationException(metadata.name(), snapshotId, "snapshot with the same name already exists"); } - if (snapshotFormat.exists(snapshotsBlobContainer, blobId(snapshotId)) || + if (snapshotFormat.exists(snapshotsBlobContainer, snapshotId.getUUID()) || snapshotLegacyFormat.exists(snapshotsBlobContainer, snapshotName)) { throw new SnapshotCreationException(metadata.name(), snapshotId, "snapshot with such name already exists"); } + // Write Global MetaData - globalMetaDataFormat.write(clusterMetadata, snapshotsBlobContainer, snapshotName); - for (String index : indices) { - final IndexMetaData indexMetaData = clusterMetadata.index(index); - final BlobPath indexPath = basePath().add("indices").add(index); + globalMetaDataFormat.write(clusterMetaData, snapshotsBlobContainer, snapshotId.getUUID()); + + // write the index metadata for each index in the snapshot + for (IndexId index : indices) { + final IndexMetaData indexMetaData = clusterMetaData.index(index.getName()); + final BlobPath indexPath = basePath().add("indices").add(index.getId()); final BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath); - indexMetaDataFormat.write(indexMetaData, indexMetaDataBlobContainer, snapshotName); + indexMetaDataFormat.write(indexMetaData, indexMetaDataBlobContainer, snapshotId.getUUID()); } } catch (IOException ex) { throw new SnapshotCreationException(metadata.name(), snapshotId, ex); } } + // Older repository index files (index-N) only contain snapshot info, not indices info, + // so if the repository data is of the older format, populate it with the indices entries + // so we know which snapshots' index blobs still use ids in the older, name-based format. + private RepositoryData upgradeRepositoryData(final RepositoryData repositoryData) throws IOException { + final Map<IndexId, Set<SnapshotId>> indexToSnapshots = new HashMap<>(); + for (final SnapshotId snapshotId : repositoryData.getSnapshotIds()) { + final SnapshotInfo snapshotInfo; + try { + snapshotInfo = getSnapshotInfo(snapshotId); + } catch (SnapshotException e) { + logger.warn("[{}] repository is on a pre-5.0 format with an index file that contains snapshot [{}] but " + + "the corresponding snap-{}.dat file cannot be read. 
The snapshot will no longer be included in " + + "the repository but its data directories will remain.", e, getMetadata().name(), + snapshotId, snapshotId.getUUID()); + continue; + } + for (final String indexName : snapshotInfo.indices()) { + final IndexId indexId = new IndexId(indexName, indexName); + if (indexToSnapshots.containsKey(indexId)) { + indexToSnapshots.get(indexId).add(snapshotId); + } else { + indexToSnapshots.put(indexId, Sets.newHashSet(snapshotId)); + } + } + } + try { + final RepositoryData updatedRepoData = repositoryData.initIndices(indexToSnapshots); + if (isReadOnly() == false) { + // write the new index gen file with the indices included + writeIndexGen(updatedRepoData); + } + return updatedRepoData; + } catch (IOException e) { + throw new RepositoryException(metadata.name(), "failed to update the repository index blob with indices data on startup", e); + } + } + @Override public void deleteSnapshot(SnapshotId snapshotId) { if (isReadOnly()) { throw new RepositoryException(metadata.name(), "cannot delete snapshot from a readonly repository"); } + final RepositoryData repositoryData = getRepositoryData(); List indices = Collections.emptyList(); SnapshotInfo snapshot = null; try { @@ -350,36 +396,29 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp MetaData metaData = null; try { if (snapshot != null) { - metaData = readSnapshotMetaData(snapshotId, snapshot.version(), indices, true); + metaData = readSnapshotMetaData(snapshotId, snapshot.version(), repositoryData.resolveIndices(indices), true); } else { - metaData = readSnapshotMetaData(snapshotId, null, indices, true); + metaData = readSnapshotMetaData(snapshotId, null, repositoryData.resolveIndices(indices), true); } } catch (IOException | SnapshotException ex) { logger.warn("cannot read metadata for snapshot [{}]", ex, snapshotId); } try { - final String snapshotName = snapshotId.getName(); - // Delete snapshot file first so we wouldn't end up with partially deleted snapshot that looks OK - if (snapshot != null) { - snapshotFormat(snapshot.version()).delete(snapshotsBlobContainer, blobId(snapshotId)); - globalMetaDataFormat(snapshot.version()).delete(snapshotsBlobContainer, snapshotName); - } else { - // We don't know which version was the snapshot created with - try deleting both current and legacy formats - snapshotFormat.delete(snapshotsBlobContainer, blobId(snapshotId)); - snapshotLegacyFormat.delete(snapshotsBlobContainer, snapshotName); - globalMetaDataLegacyFormat.delete(snapshotsBlobContainer, snapshotName); - globalMetaDataFormat.delete(snapshotsBlobContainer, snapshotName); - } - // Delete snapshot from the snapshot list - List snapshotIds = getSnapshots().stream().filter(id -> snapshotId.equals(id) == false).collect(Collectors.toList()); - writeSnapshotsToIndexGen(snapshotIds); + // Delete snapshot from the index file, since it is the maintainer of truth of active snapshots + writeIndexGen(repositoryData.removeSnapshot(snapshotId)); + + // delete the snapshot file + safeSnapshotBlobDelete(snapshot, snapshotId.getUUID()); + // delete the global metadata file + safeGlobalMetaDataBlobDelete(snapshot, snapshotId.getUUID()); // Now delete all indices for (String index : indices) { - BlobPath indexPath = basePath().add("indices").add(index); + final IndexId indexId = repositoryData.resolveIndexId(index); + BlobPath indexPath = basePath().add("indices").add(indexId.getId()); BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath); try { - 
indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getName()); + indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getUUID()); } catch (IOException ex) { logger.warn("[{}] failed to delete metadata for index [{}]", ex, snapshotId, index); } @@ -388,7 +427,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp if (indexMetaData != null) { for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) { try { - delete(snapshotId, snapshot.version(), new ShardId(indexMetaData.getIndex(), shardId)); + delete(snapshotId, snapshot.version(), indexId, new ShardId(indexMetaData.getIndex(), shardId)); } catch (SnapshotException ex) { logger.warn("[{}] failed to delete shard data for shard [{}][{}]", ex, snapshotId, index, shardId); } @@ -401,28 +440,77 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } + private void safeSnapshotBlobDelete(final SnapshotInfo snapshotInfo, final String blobId) { + if (snapshotInfo != null) { + // we know the version the snapshot was created with + try { + snapshotFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId); + } catch (IOException e) { + logger.warn("[{}] Unable to delete snapshot file [{}]", e, snapshotInfo.snapshotId(), blobId); + } + } else { + // we don't know the version, first try the current format, then the legacy format + try { + snapshotFormat.delete(snapshotsBlobContainer, blobId); + } catch (IOException e) { + // now try legacy format + try { + snapshotLegacyFormat.delete(snapshotsBlobContainer, blobId); + } catch (IOException e2) { + // neither snapshot file could be deleted, log the error + logger.warn("Unable to delete snapshot file [{}]", e, blobId); + } + } + } + } + + private void safeGlobalMetaDataBlobDelete(final SnapshotInfo snapshotInfo, final String blobId) { + if (snapshotInfo != null) { + // we know the version the snapshot was created with + try { + globalMetaDataFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId); + } catch (IOException e) { + logger.warn("[{}] Unable to delete global metadata file [{}]", e, snapshotInfo.snapshotId(), blobId); + } + } else { + // we don't know the version, first try the current format, then the legacy format + try { + globalMetaDataFormat.delete(snapshotsBlobContainer, blobId); + } catch (IOException e) { + // now try legacy format + try { + globalMetaDataLegacyFormat.delete(snapshotsBlobContainer, blobId); + } catch (IOException e2) { + // neither global metadata file could be deleted, log the error + logger.warn("Unable to delete global metadata file [{}]", e, blobId); + } + } + } + } + + /** + * {@inheritDoc} + */ @Override public SnapshotInfo finalizeSnapshot(final SnapshotId snapshotId, - final List indices, + final List indices, final long startTime, final String failure, final int totalShards, final List shardFailures) { try { SnapshotInfo blobStoreSnapshot = new SnapshotInfo(snapshotId, - indices, + indices.stream().map(IndexId::getName).collect(Collectors.toList()), startTime, failure, System.currentTimeMillis(), totalShards, shardFailures); - snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, blobId(snapshotId)); - List snapshotIds = getSnapshots(); + snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getUUID()); + final RepositoryData repositoryData = getRepositoryData(); + List snapshotIds = repositoryData.getSnapshotIds(); if (!snapshotIds.contains(snapshotId)) { 
- snapshotIds = new ArrayList<>(snapshotIds); - snapshotIds.add(snapshotId); - snapshotIds = Collections.unmodifiableList(snapshotIds); - writeSnapshotsToIndexGen(snapshotIds); + writeIndexGen(repositoryData.addSnapshot(snapshotId, indices)); } return blobStoreSnapshot; } catch (IOException ex) { @@ -430,27 +518,19 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } - @Override public List getSnapshots() { - try { - return Collections.unmodifiableList(readSnapshotsFromIndex()); - } catch (NoSuchFileException | FileNotFoundException e) { - // its a fresh repository, no index file exists, so return an empty list - return Collections.emptyList(); - } catch (IOException ioe) { - throw new RepositoryException(metadata.name(), "failed to list snapshots in repository", ioe); - } + return getRepositoryData().getSnapshotIds(); } @Override - public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException { + public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException { return readSnapshotMetaData(snapshot.snapshotId(), snapshot.version(), indices, false); } @Override public SnapshotInfo getSnapshotInfo(final SnapshotId snapshotId) { try { - return snapshotFormat.read(snapshotsBlobContainer, blobId(snapshotId)); + return snapshotFormat.read(snapshotsBlobContainer, snapshotId.getUUID()); } catch (FileNotFoundException | NoSuchFileException ex) { // File is missing - let's try legacy format instead try { @@ -465,13 +545,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } - private MetaData readSnapshotMetaData(SnapshotId snapshotId, Version snapshotVersion, List indices, boolean ignoreIndexErrors) throws IOException { + private MetaData readSnapshotMetaData(SnapshotId snapshotId, Version snapshotVersion, List indices, boolean ignoreIndexErrors) throws IOException { MetaData metaData; if (snapshotVersion == null) { // When we delete corrupted snapshots we might not know which version we are dealing with // We can try detecting the version based on the metadata file format assert ignoreIndexErrors; - if (globalMetaDataFormat.exists(snapshotsBlobContainer, snapshotId.getName())) { + if (globalMetaDataFormat.exists(snapshotsBlobContainer, snapshotId.getUUID())) { snapshotVersion = Version.CURRENT; } else if (globalMetaDataLegacyFormat.exists(snapshotsBlobContainer, snapshotId.getName())) { throw new SnapshotException(metadata.name(), snapshotId, "snapshot is too old"); @@ -480,21 +560,21 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } try { - metaData = globalMetaDataFormat(snapshotVersion).read(snapshotsBlobContainer, snapshotId.getName()); + metaData = globalMetaDataFormat(snapshotVersion).read(snapshotsBlobContainer, snapshotId.getUUID()); } catch (FileNotFoundException | NoSuchFileException ex) { throw new SnapshotMissingException(metadata.name(), snapshotId, ex); } catch (IOException ex) { throw new SnapshotException(metadata.name(), snapshotId, "failed to get snapshots", ex); } MetaData.Builder metaDataBuilder = MetaData.builder(metaData); - for (String index : indices) { - BlobPath indexPath = basePath().add("indices").add(index); + for (IndexId index : indices) { + BlobPath indexPath = basePath().add("indices").add(index.getId()); BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath); try { - metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, 
snapshotId.getName()), false); + metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, snapshotId.getUUID()), false); } catch (ElasticsearchParseException | IOException ex) { if (ignoreIndexErrors) { - logger.warn("[{}] [{}] failed to read metadata for index", ex, snapshotId, index); + logger.warn("[{}] [{}] failed to read metadata for index", ex, snapshotId, index.getName()); } else { throw ex; } @@ -562,10 +642,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } - private static final String SNAPSHOTS = "snapshots"; - private static final String NAME = "name"; - private static final String UUID = "uuid"; - @Override public long getSnapshotThrottleTimeInNanos() { return snapshotRateLimitingTimeInNanos.count(); @@ -609,6 +685,43 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } + @Override + public RepositoryData getRepositoryData() { + try { + final long indexGen = latestIndexBlobId(); + final String snapshotsIndexBlobName; + final boolean legacyFormat; + if (indexGen == -1) { + // index-N file doesn't exist, either it's a fresh repository, or it's in the + // old format, so look for the older index file before returning empty repository data + snapshotsIndexBlobName = SNAPSHOTS_FILE; + legacyFormat = true; + } else { + snapshotsIndexBlobName = INDEX_FILE_PREFIX + Long.toString(indexGen); + legacyFormat = false; + } + + RepositoryData repositoryData; + try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) { + BytesStreamOutput out = new BytesStreamOutput(); + Streams.copy(blob, out); + try (XContentParser parser = XContentHelper.createParser(out.bytes())) { + repositoryData = RepositoryData.fromXContent(parser); + } + } + if (legacyFormat) { + // pre 5.0 repository data needs to be updated to include the indices + repositoryData = upgradeRepositoryData(repositoryData); + } + return repositoryData; + } catch (NoSuchFileException nsfe) { + // repository doesn't have an index blob, it's a new blank repo + return RepositoryData.EMPTY; + } catch (IOException ioe) { + throw new RepositoryException(metadata.name(), "could not read repository data from index blob", ioe); + } + } + public static String testBlobPrefix(String seed) { return TESTS_FILE + seed; } @@ -623,35 +736,30 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp return snapshotsBlobContainer; } - protected void writeSnapshotsToIndexGen(final List<SnapshotId> snapshots) throws IOException { + protected void writeIndexGen(final RepositoryData repositoryData) throws IOException { assert isReadOnly() == false; // can not write to a read only repository final BytesReference snapshotsBytes; try (BytesStreamOutput bStream = new BytesStreamOutput()) { try (StreamOutput stream = new OutputStreamStreamOutput(bStream)) { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream); - builder.startObject(); - builder.startArray(SNAPSHOTS); - for (SnapshotId snapshot : snapshots) { - builder.startObject(); - builder.field(NAME, snapshot.getName()); - builder.field(UUID, snapshot.getUUID()); - builder.endObject(); - } - builder.endArray(); - builder.endObject(); + repositoryData.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); } snapshotsBytes = bStream.bytes(); } final long gen = latestIndexBlobId() + 1; // write the index file - writeAtomic(SNAPSHOTS_FILE_PREFIX + Long.toString(gen), snapshotsBytes); + writeAtomic(INDEX_FILE_PREFIX + Long.toString(gen), 
snapshotsBytes); // delete the N-2 index file if it exists, keep the previous one around as a backup if (isReadOnly() == false && gen - 2 >= 0) { - final String oldSnapshotIndexFile = SNAPSHOTS_FILE_PREFIX + Long.toString(gen - 2); + final String oldSnapshotIndexFile = INDEX_FILE_PREFIX + Long.toString(gen - 2); if (snapshotsBlobContainer.blobExists(oldSnapshotIndexFile)) { snapshotsBlobContainer.deleteBlob(oldSnapshotIndexFile); } + // delete the old index file (non-generational) if it exists + if (snapshotsBlobContainer.blobExists(SNAPSHOTS_FILE)) { + snapshotsBlobContainer.deleteBlob(SNAPSHOTS_FILE); + } } // write the current generation to the index-latest file @@ -660,72 +768,10 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp bStream.writeLong(gen); genBytes = bStream.bytes(); } - if (snapshotsBlobContainer.blobExists(SNAPSHOTS_INDEX_LATEST_BLOB)) { - snapshotsBlobContainer.deleteBlob(SNAPSHOTS_INDEX_LATEST_BLOB); + if (snapshotsBlobContainer.blobExists(INDEX_LATEST_BLOB)) { + snapshotsBlobContainer.deleteBlob(INDEX_LATEST_BLOB); } - writeAtomic(SNAPSHOTS_INDEX_LATEST_BLOB, genBytes); - } - - protected List readSnapshotsFromIndex() throws IOException { - final long indexGen = latestIndexBlobId(); - final String snapshotsIndexBlobName; - if (indexGen == -1) { - // index-N file doesn't exist, either its a fresh repository, or its in the - // old format, so look for the older index file before returning an empty list - snapshotsIndexBlobName = SNAPSHOTS_FILE; - } else { - snapshotsIndexBlobName = SNAPSHOTS_FILE_PREFIX + Long.toString(indexGen); - } - - try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) { - BytesStreamOutput out = new BytesStreamOutput(); - Streams.copy(blob, out); - ArrayList snapshots = new ArrayList<>(); - try (XContentParser parser = XContentHelper.createParser(out.bytes())) { - if (parser.nextToken() == XContentParser.Token.START_OBJECT) { - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - String currentFieldName = parser.currentName(); - if (SNAPSHOTS.equals(currentFieldName)) { - if (parser.nextToken() == XContentParser.Token.START_ARRAY) { - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - // the new format from 5.0 which contains the snapshot name and uuid - String name = null; - String uuid = null; - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - currentFieldName = parser.currentName(); - parser.nextToken(); - if (NAME.equals(currentFieldName)) { - name = parser.text(); - } else if (UUID.equals(currentFieldName)) { - uuid = parser.text(); - } - } - snapshots.add(new SnapshotId(name, uuid)); - } - // the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too - else { - name = parser.text(); - snapshots.add(new SnapshotId(name, SnapshotId.UNASSIGNED_UUID)); - } - } - } - } - } - } - } - return Collections.unmodifiableList(snapshots); - } - } - - // Package private for testing - static String blobId(final SnapshotId snapshotId) { - final String uuid = snapshotId.getUUID(); - if (uuid.equals(SnapshotId.UNASSIGNED_UUID)) { - // the old snapshot blob naming - return snapshotId.getName(); - } - return snapshotId.getName() + "-" + uuid; + writeAtomic(INDEX_LATEST_BLOB, genBytes); } /** @@ -762,7 +808,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp // package private for testing long readSnapshotIndexLatestBlob() throws 
IOException { - try (InputStream blob = snapshotsBlobContainer.readBlob(SNAPSHOTS_INDEX_LATEST_BLOB)) { + try (InputStream blob = snapshotsBlobContainer.readBlob(INDEX_LATEST_BLOB)) { BytesStreamOutput out = new BytesStreamOutput(); Streams.copy(blob, out); return Numbers.bytesToLong(out.bytes().toBytesRef()); @@ -770,7 +816,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } private long listBlobsToGetLatestIndexId() throws IOException { - Map blobs = snapshotsBlobContainer.listBlobsByPrefix(SNAPSHOTS_FILE_PREFIX); + Map blobs = snapshotsBlobContainer.listBlobsByPrefix(INDEX_FILE_PREFIX); long latest = -1; if (blobs.isEmpty()) { // no snapshot index blobs have been written yet @@ -779,7 +825,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp for (final BlobMetaData blobMetaData : blobs.values()) { final String blobName = blobMetaData.name(); try { - final long curr = Long.parseLong(blobName.substring(SNAPSHOTS_FILE_PREFIX.length())); + final long curr = Long.parseLong(blobName.substring(INDEX_FILE_PREFIX.length())); latest = Math.max(latest, curr); } catch (NumberFormatException nfe) { // the index- blob wasn't of the format index-N where N is a number, @@ -802,9 +848,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } + + @Override - public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) { - SnapshotContext snapshotContext = new SnapshotContext(shard, snapshotId, snapshotStatus); + public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) { + SnapshotContext snapshotContext = new SnapshotContext(shard, snapshotId, indexId, snapshotStatus); snapshotStatus.startTime(System.currentTimeMillis()); try { @@ -824,8 +872,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } @Override - public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) { - final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, version, snapshotShardId, recoveryState); + public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) { + final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, version, indexId, snapshotShardId, recoveryState); try { snapshotContext.restore(); } catch (Exception e) { @@ -834,8 +882,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) { - Context context = new Context(snapshotId, version, shardId); + public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { + Context context = new Context(snapshotId, version, indexId, shardId); BlobStoreIndexShardSnapshot snapshot = context.loadSnapshot(); IndexShardSnapshotStatus status = new IndexShardSnapshotStatus(); status.updateStage(IndexShardSnapshotStatus.Stage.DONE); @@ -869,8 +917,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp * @param snapshotId snapshot id * @param shardId shard id */ - public void delete(SnapshotId snapshotId, Version version, 
ShardId shardId) { - Context context = new Context(snapshotId, version, shardId, shardId); + private void delete(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { + Context context = new Context(snapshotId, version, indexId, shardId, shardId); context.delete(); } @@ -903,15 +951,15 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp protected final Version version; - public Context(SnapshotId snapshotId, Version version, ShardId shardId) { - this(snapshotId, version, shardId, shardId); + public Context(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { + this(snapshotId, version, indexId, shardId, shardId); } - public Context(SnapshotId snapshotId, Version version, ShardId shardId, ShardId snapshotShardId) { + public Context(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId, ShardId snapshotShardId) { this.snapshotId = snapshotId; this.version = version; this.shardId = shardId; - blobContainer = blobStore().blobContainer(basePath().add("indices").add(snapshotShardId.getIndexName()).add(Integer.toString(snapshotShardId.getId()))); + blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId()).add(Integer.toString(snapshotShardId.getId()))); } /** @@ -930,7 +978,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp int fileListGeneration = tuple.v2(); try { - indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getName()); + indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getUUID()); } catch (IOException e) { logger.debug("[{}] [{}] failed to delete shard snapshot file", shardId, snapshotId); } @@ -951,7 +999,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp */ public BlobStoreIndexShardSnapshot loadSnapshot() { try { - return indexShardSnapshotFormat(version).read(blobContainer, snapshotId.getName()); + return indexShardSnapshotFormat(version).read(blobContainer, snapshotId.getUUID()); } catch (IOException ex) { throw new IndexShardRestoreFailedException(shardId, "failed to read shard snapshot file", ex); } @@ -1080,7 +1128,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp try { BlobStoreIndexShardSnapshot snapshot = null; if (name.startsWith(SNAPSHOT_PREFIX)) { - snapshot = indexShardSnapshotFormat.readBlob(blobContainer, name); + snapshot = indexShardSnapshotFormat.readBlob(blobContainer, snapshotId.getUUID()); } else if (name.startsWith(LEGACY_SNAPSHOT_PREFIX)) { snapshot = indexShardSnapshotLegacyFormat.readBlob(blobContainer, name); } @@ -1109,10 +1157,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp * * @param shard shard to be snapshotted * @param snapshotId snapshot id + * @param indexId the id of the index being snapshotted * @param snapshotStatus snapshot status to report progress */ - public SnapshotContext(IndexShard shard, SnapshotId snapshotId, IndexShardSnapshotStatus snapshotStatus) { - super(snapshotId, Version.CURRENT, shard.shardId()); + public SnapshotContext(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexShardSnapshotStatus snapshotStatus) { + super(snapshotId, Version.CURRENT, indexId, shard.shardId()); this.snapshotStatus = snapshotStatus; this.store = shard.store(); } @@ -1220,7 +1269,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp //TODO: The time stored in snapshot doesn't include cleanup time. 
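// Note on naming (uuid here is illustrative): shard snapshot blobs are now written under the
// snapshot's UUID instead of its name, so a shard blob ends up at a path along the lines of
// indices/<index-id>/<shard>/snap-1X2_aB9cQQ2zvQl6PmSiTg.dat; re-using a snapshot name after a
// delete therefore cannot collide with blobs an earlier snapshot of the same name left behind.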
logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId); try { - indexShardSnapshotFormat.write(snapshot, blobContainer, snapshotId.getName()); + indexShardSnapshotFormat.write(snapshot, blobContainer, snapshotId.getUUID()); } catch (IOException e) { throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e); } @@ -1396,11 +1445,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp * * @param shard shard to restore into * @param snapshotId snapshot id + * @param indexId id of the index being restored * @param snapshotShardId shard in the snapshot that data should be restored from * @param recoveryState recovery state to report progress */ - public RestoreContext(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) { - super(snapshotId, version, shard.shardId(), snapshotShardId); + public RestoreContext(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) { + super(snapshotId, version, indexId, shard.shardId(), snapshotShardId); this.recoveryState = recoveryState; store = shard.store(); } @@ -1574,6 +1624,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } } } - } + } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index a1aff430f26..e4b80b5dc80 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; @@ -101,7 +100,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust private final ScriptModes scriptModes; private final ScriptContextRegistry scriptContextRegistry; - private final ParseFieldMatcher parseFieldMatcher; private final ScriptMetrics scriptMetrics = new ScriptMetrics(); private ClusterState clusterState; @@ -113,7 +111,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust Objects.requireNonNull(scriptEngineRegistry); Objects.requireNonNull(scriptContextRegistry); Objects.requireNonNull(scriptSettings); - this.parseFieldMatcher = new ParseFieldMatcher(settings); if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) { throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. 
\n" + "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: true` and `script.stored: true` in elasticsearch.yml"); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index 41b19a3e572..e315f8d816c 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -19,11 +19,9 @@ package org.elasticsearch.script; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.ScriptService; import java.util.ArrayList; import java.util.Collections; @@ -31,7 +29,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; -import java.util.stream.Collectors; public class ScriptSettings { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/package-info.java new file mode 100644 index 00000000000..ba529e687c8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Support for limiting the completion suggesters results to within a "context" like a geographic location or a category. + */ +package org.elasticsearch.search.suggest.completion.context; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/package-info.java new file mode 100644 index 00000000000..dfa3f548e86 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Suggests alternate queries by fancy prefix matching. + */ +package org.elasticsearch.search.suggest.completion; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java new file mode 100644 index 00000000000..0d9a9e71963 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Support for completion suggesters with contexts built on 2.x indices. + */ +package org.elasticsearch.search.suggest.completion2x.context; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java new file mode 100644 index 00000000000..b8b14aa7c3f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Support for completion suggesters built on 2.x indices. + */ +package org.elasticsearch.search.suggest.completion2x; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/package-info.java new file mode 100644 index 00000000000..b2da9561083 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Support for suggesting alternate queries. + */ +package org.elasticsearch.search.suggest; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/package-info.java new file mode 100644 index 00000000000..b721881b769 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Suggests alternate queries by breaking the query into terms and suggesting terms that are frequently found together. + */ +package org.elasticsearch.search.suggest.phrase; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/term/package-info.java new file mode 100644 index 00000000000..fb568d405e8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/suggest/term/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Suggests alternate queries by breaking the query into terms and suggesting more popular terms. 
+ */ +package org.elasticsearch.search.suggest.term; diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index dedcc6d8d21..7ab579aa455 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -63,8 +63,10 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -185,7 +187,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis try { // Read snapshot info and metadata from the repository Repository repository = repositoriesService.repository(request.repositoryName); - final Optional matchingSnapshotId = repository.getSnapshots().stream() + final RepositoryData repositoryData = repository.getRepositoryData(); + final Optional matchingSnapshotId = repositoryData.getSnapshotIds().stream() .filter(s -> request.snapshotName.equals(s.getName())).findFirst(); if (matchingSnapshotId.isPresent() == false) { throw new SnapshotRestoreException(request.repositoryName, request.snapshotName, "snapshot does not exist"); @@ -194,7 +197,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis final SnapshotInfo snapshotInfo = repository.getSnapshotInfo(snapshotId); final Snapshot snapshot = new Snapshot(request.repositoryName, snapshotId); List filteredIndices = SnapshotUtils.filterIndices(snapshotInfo.indices(), request.indices(), request.indicesOptions()); - MetaData metaDataIn = repository.getSnapshotMetaData(snapshotInfo, filteredIndices); + MetaData metaDataIn = repository.getSnapshotMetaData(snapshotInfo, repositoryData.resolveIndices(filteredIndices)); final MetaData metaData; if (snapshotInfo.version().before(Version.V_2_0_0_beta1)) { diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotId.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotId.java index 16f371b28f7..4866a79afb9 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotId.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotId.java @@ -22,6 +22,9 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -29,12 +32,10 @@ import java.util.Objects; /** * SnapshotId - snapshot name + snapshot UUID */ -public final class SnapshotId implements Writeable { +public final class SnapshotId implements Writeable, ToXContent { - /** - * This value is for older snapshots that don't have a UUID. 
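For illustration (values made up), the two serialized forms that SnapshotId.fromXContent below accepts are:

  "snap-20131010"                                                   (pre-5.0: a bare name, parsed as new SnapshotId(name, name))
  { "name" : "snap-20131010", "uuid" : "1X2_aB9cQQ2zvQl6PmSiTg" }   (5.0+: an explicit name/uuid object)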
- */ - public static final String UNASSIGNED_UUID = "_na_"; + private static final String NAME = "name"; + private static final String UUID = "uuid"; private final String name; private final String uuid; @@ -115,4 +116,35 @@ public final class SnapshotId implements Writeable { out.writeString(uuid); } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME, name); + builder.field(UUID, uuid); + builder.endObject(); + return builder; + } + + public static SnapshotId fromXContent(XContentParser parser) throws IOException { + // the new format from 5.0 which contains the snapshot name and uuid + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + String name = null; + String uuid = null; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String currentFieldName = parser.currentName(); + parser.nextToken(); + if (NAME.equals(currentFieldName)) { + name = parser.text(); + } else if (UUID.equals(currentFieldName)) { + uuid = parser.text(); + } + } + return new SnapshotId(name, uuid); + } else { + // the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too + final String name = parser.text(); + return new SnapshotId(name, name); + } + } + } diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 2159fda2237..ddcee4b0353 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -458,7 +458,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, } if (uuid == null) { // the old format where there wasn't a UUID - uuid = SnapshotId.UNASSIGNED_UUID; + uuid = name; } return new SnapshotInfo(new SnapshotId(name, uuid), indices, diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 7741ef1c0e6..136f37eee71 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -46,6 +46,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -66,6 +67,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -208,8 +211,11 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements Map> newSnapshots = new HashMap<>(); // Now go through all snapshots and update existing or create missing final String localNodeId = clusterService.localNode().getId(); + final Map> snapshotIndices = new HashMap<>(); if (snapshotsInProgress != null) { for (SnapshotsInProgress.Entry entry : snapshotsInProgress.entries()) { + 
snapshotIndices.put(entry.snapshot(), + entry.indices().stream().collect(Collectors.toMap(IndexId::getName, Function.identity()))); if (entry.state() == SnapshotsInProgress.State.STARTED) { Map startedShards = new HashMap<>(); SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot()); @@ -289,14 +295,18 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements if (newSnapshots.isEmpty() == false) { Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT); for (final Map.Entry> entry : newSnapshots.entrySet()) { + Map indicesMap = snapshotIndices.get(entry.getKey()); + assert indicesMap != null; for (final Map.Entry shardEntry : entry.getValue().entrySet()) { final ShardId shardId = shardEntry.getKey(); try { final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id()); + final IndexId indexId = indicesMap.get(shardId.getIndexName()); + assert indexId != null; executor.execute(new AbstractRunnable() { @Override public void doRun() { - snapshot(indexShard, entry.getKey(), shardEntry.getValue()); + snapshot(indexShard, entry.getKey(), indexId, shardEntry.getValue()); updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.SUCCESS)); } @@ -321,7 +331,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements * @param snapshot snapshot * @param snapshotStatus snapshot status */ - private void snapshot(final IndexShard indexShard, final Snapshot snapshot, final IndexShardSnapshotStatus snapshotStatus) { + private void snapshot(final IndexShard indexShard, final Snapshot snapshot, final IndexId indexId, final IndexShardSnapshotStatus snapshotStatus) { Repository repository = snapshotsService.getRepositoriesService().repository(snapshot.getRepository()); ShardId shardId = indexShard.shardId(); if (!indexShard.routingEntry().primary()) { @@ -340,7 +350,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements // we flush first to make sure we get the latest writes snapshotted IndexCommit snapshotIndexCommit = indexShard.snapshotIndex(true); try { - repository.snapshotShard(indexShard, snapshot.getSnapshotId(), snapshotIndexCommit, snapshotStatus); + repository.snapshotShard(indexShard, snapshot.getSnapshotId(), indexId, snapshotIndexCommit, snapshotStatus); if (logger.isDebugEnabled()) { StringBuilder sb = new StringBuilder(); sb.append(" index : version [").append(snapshotStatus.indexVersion()).append("], number_of_files [").append(snapshotStatus.numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(snapshotStatus.totalSize())).append("]\n"); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index df56f2a24a6..1725536205f 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -56,8 +56,10 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; import 
org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.threadpool.ThreadPool; @@ -132,7 +134,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus public List snapshotIds(final String repositoryName) { Repository repository = repositoriesService.repository(repositoryName); assert repository != null; // should only be called once we've validated the repository exists - return repository.getSnapshots(); + return repository.getRepositoryData().getSnapshotIds(); } /** @@ -218,6 +220,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus final String snapshotName = request.snapshotName; validate(repositoryName, snapshotName); final SnapshotId snapshotId = new SnapshotId(snapshotName, UUIDs.randomBase64UUID()); // new UUID for the snapshot + final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData(); clusterService.submitStateUpdateTask(request.cause(), new ClusterStateUpdateTask() { @@ -232,11 +235,12 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus // Store newSnapshot here to be processed in clusterStateProcessed List indices = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(currentState, request.indicesOptions(), request.indices())); logger.trace("[{}][{}] creating snapshot for indices [{}]", repositoryName, snapshotName, indices); + List snapshotIndices = repositoryData.resolveNewIndices(indices); newSnapshot = new SnapshotsInProgress.Entry(new Snapshot(repositoryName, snapshotId), request.includeGlobalState(), request.partial(), State.INIT, - indices, + snapshotIndices, System.currentTimeMillis(), null); snapshots = new SnapshotsInProgress(newSnapshot); @@ -334,8 +338,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus if (!snapshot.includeGlobalState()) { // Remove global state from the cluster state MetaData.Builder builder = MetaData.builder(); - for (String index : snapshot.indices()) { - builder.put(metaData.index(index), false); + for (IndexId index : snapshot.indices()) { + builder.put(metaData.index(index.getName()), false); } metaData = builder.build(); } @@ -473,7 +477,9 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus } private SnapshotInfo inProgressSnapshot(SnapshotsInProgress.Entry entry) { - return new SnapshotInfo(entry.snapshot().getSnapshotId(), entry.indices(), entry.startTime()); + return new SnapshotInfo(entry.snapshot().getSnapshotId(), + entry.indices().stream().map(IndexId::getName).collect(Collectors.toList()), + entry.startTime()); } /** @@ -546,8 +552,10 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus final SnapshotInfo snapshotInfo) throws IOException { Map shardStatus = new HashMap<>(); Repository repository = repositoriesService.repository(repositoryName); - MetaData metaData = repository.getSnapshotMetaData(snapshotInfo, snapshotInfo.indices()); + RepositoryData repositoryData = repository.getRepositoryData(); + MetaData metaData = repository.getSnapshotMetaData(snapshotInfo, repositoryData.resolveIndices(snapshotInfo.indices())); for (String index : snapshotInfo.indices()) { + IndexId indexId = repositoryData.resolveIndexId(index); IndexMetaData indexMetaData = metaData.indices().get(index); if (indexMetaData != null) { int numberOfShards = indexMetaData.getNumberOfShards(); @@ -561,7 +569,7 @@ public class 
SnapshotsService extends AbstractLifecycleComponent implements Clus shardStatus.put(shardId, shardSnapshotStatus); } else { IndexShardSnapshotStatus shardSnapshotStatus = - repository.getShardSnapshotStatus(snapshotInfo.snapshotId(), snapshotInfo.version(), shardId); + repository.getShardSnapshotStatus(snapshotInfo.snapshotId(), snapshotInfo.version(), indexId, shardId); shardStatus.put(shardId, shardSnapshotStatus); } } @@ -953,7 +961,10 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus public void deleteSnapshot(final String repositoryName, final String snapshotName, final DeleteSnapshotListener listener) { // First, look for the snapshot in the repository final Repository repository = repositoriesService.repository(repositoryName); - Optional matchedEntry = repository.getSnapshots().stream().filter(s -> s.getName().equals(snapshotName)).findFirst(); + Optional matchedEntry = repository.getRepositoryData().getSnapshotIds() + .stream() + .filter(s -> s.getName().equals(snapshotName)) + .findFirst(); // if nothing found by the same name, then look in the cluster state for current in progress snapshots if (matchedEntry.isPresent() == false) { matchedEntry = currentSnapshots(repositoryName, Collections.emptyList()).stream() @@ -1121,21 +1132,22 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus * @param indices list of indices to be snapshotted * @return list of shard to be included into current snapshot */ - private ImmutableOpenMap shards(ClusterState clusterState, List indices) { + private ImmutableOpenMap shards(ClusterState clusterState, List indices) { ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); MetaData metaData = clusterState.metaData(); - for (String index : indices) { - IndexMetaData indexMetaData = metaData.index(index); + for (IndexId index : indices) { + final String indexName = index.getName(); + IndexMetaData indexMetaData = metaData.index(indexName); if (indexMetaData == null) { // The index was deleted before we managed to start the snapshot - mark it as missing. 
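// Note how the two halves of an IndexId are used from here on: the name resolves cluster-state
// constructs (IndexMetaData, the routing table below), while the repository-internal id is only
// ever used for blob store paths (indices/<id>/...). resolveNewIndices reuses the existing id
// for a name the repository already tracks and assigns a fresh UUID-based id otherwise.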
- builder.put(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index")); + builder.put(new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index")); } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { ShardId shardId = new ShardId(indexMetaData.getIndex(), i); builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "index is closed")); } } else { - IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(index); + IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(indexName); for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { ShardId shardId = new ShardId(indexMetaData.getIndex(), i); if (indexRoutingTable != null) { @@ -1191,8 +1203,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus for (final SnapshotsInProgress.Entry entry : snapshots.entries()) { if (entry.partial() == false) { if (entry.state() == State.INIT) { - for (String index : entry.indices()) { - IndexMetaData indexMetaData = currentState.metaData().index(index); + for (IndexId index : entry.indices()) { + IndexMetaData indexMetaData = currentState.metaData().index(index.getName()); if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); diff --git a/core/src/main/java/org/elasticsearch/tasks/package-info.java b/core/src/main/java/org/elasticsearch/tasks/package-info.java new file mode 100644 index 00000000000..c967aaca508 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Support for viewing and modifying in flight actions ({@link org.elasticsearch.tasks.Task}s) and saving their results to an index. This + * includes getting detailed descriptions and canceling tasks that support it. 
+ */ +package org.elasticsearch.tasks; diff --git a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index b30b978eddf..ce50fbc7e5d 100644 --- a/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.action; -import org.elasticsearch.action.DocWriteResponse.Operation; +import org.elasticsearch.action.DocWriteResponse.Result; import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.not; public class DocWriteResponseTests extends ESTestCase { public void testGetLocation() { - DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) { + DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Result.CREATED) { // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. }; assertEquals("/index/type/id", response.getLocation(null)); @@ -48,7 +48,7 @@ public class DocWriteResponseTests extends ESTestCase { * is true. We can't assert this in the yaml tests because "not found" is also "false" there.... */ public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOException { - DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) { + DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Result.CREATED) { // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. 
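Aside: the Operation-to-Result rename in the hunks above and below changes what response-handling code matches on. A hedged sketch of the new mapping, using only enum values and accessors introduced in this diff (the helper name is illustrative):

    import org.elasticsearch.action.DocWriteResponse;
    import org.elasticsearch.action.index.IndexResponse;

    static boolean isFirstWrite(IndexResponse response) {
        // Result.CREATED replaces Operation.CREATE; re-indexing an existing id now
        // reports Result.UPDATED where the old enum reported Operation.INDEX.
        return response.getResult() == DocWriteResponse.Result.CREATED;
    }

The test continues below with the forced-refresh assertions.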
}; response.setShardInfo(new ShardInfo(1, 1)); diff --git a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 8b6cfc08276..07931c54b06 100644 --- a/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -234,7 +234,7 @@ public class IndicesRequestIT extends ESIntegTestCase { client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get(); UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").doc("field1", "value1"); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); clearInterceptedActions(); assertSameIndices(updateRequest, updateShardActions); @@ -248,7 +248,7 @@ public class IndicesRequestIT extends ESIntegTestCase { String indexOrAlias = randomIndexOrAlias(); UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").upsert("field", "value").doc("field1", "value1"); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); clearInterceptedActions(); assertSameIndices(updateRequest, updateShardActions); @@ -264,7 +264,7 @@ public class IndicesRequestIT extends ESIntegTestCase { UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id") .script(new Script("ctx.op='delete'", ScriptService.ScriptType.INLINE, CustomScriptPlugin.NAME, Collections.emptyMap())); UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); clearInterceptedActions(); assertSameIndices(updateRequest, updateShardActions); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index cea238db9ce..16502ff92b1 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -207,11 +207,11 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { .add(client().prepareIndex("test", "type", "2").setCreate(true).setSource("field", "1")) .add(client().prepareIndex("test", "type", "1").setSource("field", "2")).get(); - assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[0].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); - assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[1].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[1].getResponse().getResult()); assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(1L)); - assertEquals(DocWriteResponse.Operation.INDEX, bulkResponse.getItems()[2].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, 
bulkResponse.getItems()[2].getResponse().getResult()); assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L)); bulkResponse = client().prepareBulk() @@ -232,11 +232,11 @@ public class BulkWithUpdatesIT extends ESIntegTestCase { .setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL)) .get(); - assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[0].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(10L)); - assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[1].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[1].getResponse().getResult()); assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(10L)); - assertEquals(DocWriteResponse.Operation.INDEX, bulkResponse.getItems()[2].getResponse().getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, bulkResponse.getItems()[2].getResponse().getResult()); assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(12L)); bulkResponse = client().prepareBulk() diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 9b67f128183..81b5290f63b 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -98,7 +98,7 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { // index data with mapping changes IndexResponse response = client(dataNode).prepareIndex("myindex", "mytype").setSource("field_" + i, "val").get(); - assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } } }); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 37d259ed1b4..a170fcd02f8 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -119,8 +119,8 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { for (int i = 0; i < numDocs; i++) { String routingKey = routing ? 
randomRealisticUnicodeOfLength(10) : null; String id = Integer.toString(i); - assertEquals(id, DocWriteResponse.Operation.CREATE, client().prepareIndex("test", "type1", id) - .setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().getOperation()); + assertEquals(id, DocWriteResponse.Result.CREATED, client().prepareIndex("test", "type1", id) + .setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().getResult()); GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get(); assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true)); assertThat(get.getVersion(), equalTo(1L)); @@ -478,7 +478,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs)); DeleteResponse deleteResponse = client().prepareDelete("test", "test", firstDocId).setRouting("routing").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); GetResponse getResponse = client().prepareGet("test", "test", firstDocId).setRouting("routing").get(); assertThat(getResponse.isExists(), equalTo(false)); refresh(); @@ -493,7 +493,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { int numDocs = iterations(10, 50); for (int i = 0; i < numDocs; i++) { IndexResponse indexResponse = client().prepareIndex(indexOrAlias(), "type", Integer.toString(i)).setSource("field", "value-" + i).get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); assertThat(indexResponse.getIndex(), equalTo("test")); assertThat(indexResponse.getType(), equalTo("type")); assertThat(indexResponse.getId(), equalTo(Integer.toString(i))); @@ -508,7 +508,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { assertThat(getResponse.getId(), equalTo(docId)); DeleteResponse deleteResponse = client().prepareDelete(indexOrAlias(), "type", docId).get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getIndex(), equalTo("test")); assertThat(deleteResponse.getType(), equalTo("type")); assertThat(deleteResponse.getId(), equalTo(docId)); @@ -532,7 +532,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getType(), equalTo("type1")); assertThat(updateResponse.getId(), equalTo("1")); - assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); GetResponse getResponse = client().prepareGet("test", "type1", "1").get(); assertThat(getResponse.isExists(), equalTo(true)); @@ -543,7 +543,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getType(), equalTo("type1")); assertThat(updateResponse.getId(), equalTo("1")); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); getResponse = client().prepareGet("test", "type1", "1").get(); assertThat(getResponse.isExists(), 
equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java index c29d83b4454..9bfcc554998 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; * as blob names and repository blob formats have changed between the snapshot versions. */ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) -// this test sometimes fails in recovery when the recovery is reset, increasing the logging level to help debug +// this test sometimes fails in recovery when the recovery is reset, increasing the logging level to help debug @TestLogging("indices.recovery:DEBUG") public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase { @@ -70,7 +70,7 @@ public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase { final Set snapshotInfos = Sets.newHashSet(getSnapshots(repoName)); assertThat(snapshotInfos.size(), equalTo(1)); SnapshotInfo originalSnapshot = snapshotInfos.iterator().next(); - assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", SnapshotId.UNASSIGNED_UUID))); + assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1"))); assertThat(Sets.newHashSet(originalSnapshot.indices()), equalTo(indices)); logger.info("--> restore the original snapshot"); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index 68a0f73eb34..9ffabec6fc0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -53,7 +53,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; -import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collections; @@ -659,7 +658,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { randomBoolean(), randomBoolean(), SnapshotsInProgress.State.fromValue((byte) randomIntBetween(0, 6)), - Collections.emptyList(), + Collections.emptyList(), Math.abs(randomLong()), ImmutableOpenMap.of())); case 1: diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 0bacac191f9..466d3b4f83d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -491,7 +491,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard); IndexResponse response = client.prepareIndex("test", "type", id).setSource("{}").setTimeout(timeout).get(timeout); - assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); ackedDocs.put(id, node); logger.trace("[{}] indexed id [{}] through node [{}]", name, id, node); } catch (ElasticsearchException e) { diff --git 
a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index c97d99c9216..932f42eaf0f 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -177,7 +177,7 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2")); DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); response = client().prepareGet(indexOrAlias(), "type1", "1").get(); assertThat(response.isExists(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 2aa7b283be4..e71f42adb55 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -415,7 +415,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { try { final IndexResponse indexResponse = client().prepareIndex(IDX, "doc", Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } catch (Exception e) { exceptions.add(e); } @@ -508,7 +508,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { while (counter.get() < (numPhase1Docs + numPhase2Docs + numPhase3Docs)) { final IndexResponse indexResponse = client().prepareIndex(IDX, "doc", Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); final int docCount = counter.get(); if (docCount == numPhase1Docs) { phase1finished.countDown(); diff --git a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java index a8eb13dc40f..378947ec345 100644 --- a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java +++ b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java @@ -84,7 +84,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase { // Now delete with blockUntilRefresh DeleteResponse delete = client().prepareDelete("test", "test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); - assertEquals(DocWriteResponse.Operation.DELETE, delete.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, delete.getResult()); assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh()); assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 62569d11657..ca4f7097bfd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -100,8 +100,8 @@ public class DynamicMappingIT extends ESIntegTestCase { public void run() { try { startLatch.await(); - 
assertEquals(DocWriteResponse.Operation.CREATE, client().prepareIndex("index", "type", id) - .setSource("field" + id, "bar").get().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, client().prepareIndex("index", "type", id) + .setSource("field" + id, "bar").get().getResult()); } catch (Exception e) { error.compareAndSet(null, e); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index aabb0f69bb4..2bce69f6ecf 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -137,13 +137,13 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { .endObject().endObject()).get(); ensureGreen(); - assertEquals(DocWriteResponse.Operation.CREATE, prepareIndex("single", "I have four terms").get().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, prepareIndex("single", "I have four terms").get().getResult()); BulkResponse bulk = client().prepareBulk() .add(prepareIndex("bulk1", "bulk three terms")) .add(prepareIndex("bulk2", "this has five bulk terms")).get(); assertFalse(bulk.buildFailureMessage(), bulk.hasFailures()); - assertEquals(DocWriteResponse.Operation.CREATE, - prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, + prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getResult()); bulk = client().prepareBulk() .add(prepareIndex("multibulk1", "one", "oh wow now I have eight unlucky terms")) .add(prepareIndex("multibulk2", "six is a bunch of terms", "ten! ten terms is just crazy! too many too count!")).get(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java index f35958c761b..92f9a9958fb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java @@ -449,7 +449,7 @@ public class LegacyDateMappingTests extends ESSingleNodeTestCase { ParsedDocument doc = defaultMapper.parse("test", "type", "1", document.bytes()); assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); IndexResponse indexResponse = client().prepareIndex("test2", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); // integers should always be parsed as well... 
cannot be sure it is a unix timestamp only doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() @@ -459,7 +459,7 @@ public class LegacyDateMappingTests extends ESSingleNodeTestCase { .bytes()); assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); indexResponse = client().prepareIndex("test", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java index fdf7049aced..920d2537935 100644 --- a/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf; public class ScriptQueryBuilderTests extends AbstractQueryTestCase { @Override protected ScriptQueryBuilder doCreateTestQueryBuilder() { - String script = "5"; + String script = "1"; Map params = Collections.emptyMap(); return new ScriptQueryBuilder(new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 6950a98631f..bd841a05ca1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -155,7 +155,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase params = Collections.emptyMap(); functionBuilder = new ScriptScoreFunctionBuilder( new Script(script, ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, params)); diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 85b4d54e4f9..d561536d2b9 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -30,8 +30,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; -import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -82,8 +80,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportResponse; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.nio.file.Files; @@ -257,7 +253,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { final IndexRequest indexRequest = new 
IndexRequest(index.getName(), "type", Integer.toString(docId.incrementAndGet())) .source("{}"); final IndexResponse response = index(indexRequest); - assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); } return numOfDoc; } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 95a705f8e27..a412d37c111 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -100,7 +100,9 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; @@ -121,8 +123,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; @@ -1184,9 +1188,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { test_target_shard.updateRoutingEntry(routing); DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); test_target_shard.markAsRecovering("store", new RecoveryState(routing.shardId(), routing.primary(), RecoveryState.Type.SNAPSHOT, routing.restoreSource(), localNode)); - assertTrue(test_target_shard.restoreFromRepository(new RestoreOnlyRepository() { + assertTrue(test_target_shard.restoreFromRepository(new RestoreOnlyRepository("test") { @Override - public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) { + public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) { try { cleanLuceneIndex(targetStore.directory()); for (String file : sourceStore.directory().listAll()) { @@ -1645,8 +1649,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { /** A dummy repository for testing which just needs restore overridden */ private abstract static class RestoreOnlyRepository extends AbstractLifecycleComponent implements Repository { - public RestoreOnlyRepository() { + private final String indexName; + public RestoreOnlyRepository(String indexName) { super(Settings.EMPTY); + this.indexName = indexName; } @Override protected void doStart() {} @@ -1663,17 +1669,19 @@ public class IndexShardTests extends ESSingleNodeTestCase { return null; } @Override - public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException { + public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List indices) throws IOException { return null; } @Override - public List getSnapshots() { - return null; + public RepositoryData getRepositoryData() { + Map> map = new HashMap<>(); + map.put(new IndexId(indexName, "blah"), Collections.emptySet()); + return new 
RepositoryData(Collections.emptyList(), map); } @Override - public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData) {} + public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData) {} @Override - public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures) { + public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List indices, long startTime, String failure, int totalShards, List shardFailures) { return null; } @Override @@ -1697,9 +1705,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { return false; } @Override - public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {} + public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {} @Override - public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) { + public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { return null; } @Override diff --git a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index c3678fc63d2..ad540556664 100644 --- a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -94,15 +94,15 @@ public class IndexActionIT extends ESIntegTestCase { ensureGreen(); IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.INDEX, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); client().prepareDelete("test", "type", "1").execute().actionGet(); indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } @@ -111,14 +111,14 @@ public class IndexActionIT extends ESIntegTestCase { ensureGreen(); IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); client().prepareDelete("test", "type", "1").execute().actionGet(); flush(); indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } public void testCreatedFlagParallelExecution() throws Exception { @@ -139,7 +139,7 @@ public class IndexActionIT extends ESIntegTestCase { public Void call() throws Exception { int docId = random.nextInt(docCount); IndexResponse indexResponse = 
index("test", "type", Integer.toString(docId), "field1", "value"); - if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) { + if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) { createdCounts.incrementAndGet(docId); } return null; @@ -161,7 +161,7 @@ public class IndexActionIT extends ESIntegTestCase { IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(123) .setVersionType(VersionType.EXTERNAL).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } public void testCreateFlagWithBulk() { @@ -172,7 +172,7 @@ public class IndexActionIT extends ESIntegTestCase { assertThat(bulkResponse.hasFailures(), equalTo(false)); assertThat(bulkResponse.getItems().length, equalTo(1)); IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } public void testCreateIndexWithLongName() { diff --git a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index 25be024bf14..e91ed066cc6 100644 --- a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -76,15 +75,15 @@ public class DateMathIndexExpressionsIntegrationIT extends ESIntegTestCase { assertThat(indicesStatsResponse.getIndex(index3), notNullValue()); DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "type", "1").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("1")); deleteResponse = client().prepareDelete(dateMathExp2, "type", "2").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("2")); deleteResponse = client().prepareDelete(dateMathExp3, "type", "3").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getId(), equalTo("3")); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java index 3341700d5a3..ad4ea6567c2 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -56,9 +56,9 @@ public class IndexPrimaryRelocationIT extends ESIntegTestCase { public void 
run() { while (finished.get() == false) { IndexResponse indexResponse = client().prepareIndex("test", "type", "id").setSource("field", "value").get(); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); DeleteResponse deleteResponse = client().prepareDelete("test", "type", "id").get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); } } }; diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 5328bddf56f..aee3dd227e8 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -35,7 +35,6 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -1037,8 +1036,8 @@ public class IndexStatsIT extends ESIntegTestCase { assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L)); }); - assertEquals(DocWriteResponse.Operation.DELETE, client().prepareDelete("index", "type", "1").get().getOperation()); - assertEquals(DocWriteResponse.Operation.DELETE, client().prepareDelete("index", "type", "2").get().getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "1").get().getResult()); + assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "2").get().getResult()); refresh(); response = client().admin().indices().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(response); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 6f4534a415e..fbe41e70263 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -162,7 +162,7 @@ public class IngestClientIT extends ESIntegTestCase { itemResponse.isFailed(), is(false)); assertThat(indexResponse, notNullValue()); assertThat(indexResponse.getId(), equalTo(Integer.toString(i))); - assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } } } diff --git a/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java b/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java new file mode 100644 index 00000000000..30002d54a6b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +/** + * Tests for the {@link IndexId} class. + */ +public class IndexIdTests extends ESTestCase { + + public void testEqualsAndHashCode() { + // assert equals and hashcode + String name = randomAsciiOfLength(8); + String id = UUIDs.randomBase64UUID(); + IndexId indexId1 = new IndexId(name, id); + IndexId indexId2 = new IndexId(name, id); + assertEquals(indexId1, indexId2); + assertEquals(indexId1.hashCode(), indexId2.hashCode()); + // assert equals when using index name for id + id = name; + indexId1 = new IndexId(name, id); + indexId2 = new IndexId(name, id); + assertEquals(indexId1, indexId2); + assertEquals(indexId1.hashCode(), indexId2.hashCode()); + //assert not equals when name or id differ + indexId2 = new IndexId(randomAsciiOfLength(8), id); + assertNotEquals(indexId1, indexId2); + assertNotEquals(indexId1.hashCode(), indexId2.hashCode()); + indexId2 = new IndexId(name, UUIDs.randomBase64UUID()); + assertNotEquals(indexId1, indexId2); + assertNotEquals(indexId1.hashCode(), indexId2.hashCode()); + } + + public void testSerialization() throws IOException { + IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + BytesStreamOutput out = new BytesStreamOutput(); + indexId.writeTo(out); + assertEquals(indexId, new IndexId(out.bytes().streamInput())); + } + + public void testXContent() throws IOException { + IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + XContentBuilder builder = JsonXContent.contentBuilder(); + indexId.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentParser parser = XContentType.JSON.xContent().createParser(builder.bytes()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + String name = null; + String id = null; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + final String currentFieldName = parser.currentName(); + parser.nextToken(); + if (currentFieldName.equals(IndexId.NAME)) { + name = parser.text(); + } else if (currentFieldName.equals(IndexId.ID)) { + id = parser.text(); + } + } + assertNotNull(name); + assertNotNull(id); + assertEquals(indexId, new IndexId(name, id)); + } +} diff --git a/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java new file mode 100644 index 00000000000..1fb34249fd2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -0,0 +1,171 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.greaterThan; + +/** + * Tests for the {@link RepositoryData} class. + */ +public class RepositoryDataTests extends ESTestCase { + + public void testEqualsAndHashCode() { + RepositoryData repositoryData1 = generateRandomRepoData(); + RepositoryData repositoryData2 = repositoryData1.copy(); + assertEquals(repositoryData1, repositoryData2); + assertEquals(repositoryData1.hashCode(), repositoryData2.hashCode()); + } + + public void testXContent() throws IOException { + RepositoryData repositoryData = generateRandomRepoData(); + XContentBuilder builder = JsonXContent.contentBuilder(); + repositoryData.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentParser parser = XContentType.JSON.xContent().createParser(builder.bytes()); + assertEquals(repositoryData, RepositoryData.fromXContent(parser)); + } + + public void testAddSnapshots() { + RepositoryData repositoryData = generateRandomRepoData(); + // test that adding the same snapshot id to the repository data throws an exception + final SnapshotId snapshotId = repositoryData.getSnapshotIds().get(0); + Map indexIdMap = repositoryData.getIndices(); + expectThrows(IllegalArgumentException.class, + () -> repositoryData.addSnapshot(new SnapshotId(snapshotId.getName(), snapshotId.getUUID()), Collections.emptyList())); + // test that adding a snapshot and its indices works + SnapshotId newSnapshot = new SnapshotId(randomAsciiOfLength(7), UUIDs.randomBase64UUID()); + List indices = new ArrayList<>(); + Set newIndices = new HashSet<>(); + int numNew = randomIntBetween(1, 10); + for (int i = 0; i < numNew; i++) { + IndexId indexId = new IndexId(randomAsciiOfLength(7), UUIDs.randomBase64UUID()); + newIndices.add(indexId); + indices.add(indexId); + } + int numOld = randomIntBetween(1, indexIdMap.size()); + List indexNames = new ArrayList<>(indexIdMap.keySet()); + for (int i = 0; i < numOld; i++) { + indices.add(indexIdMap.get(indexNames.get(i))); + } + RepositoryData newRepoData = repositoryData.addSnapshot(newSnapshot, indices); + // verify that the new repository data has the new snapshot and its indices + 
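Aside: the assertions below depend on RepositoryData being immutable, with addSnapshot and removeSnapshot returning new instances rather than mutating the receiver. A small sketch under that assumption (snapshot and index names are illustrative):

    import java.util.Collections;
    import org.elasticsearch.common.UUIDs;
    import org.elasticsearch.repositories.IndexId;
    import org.elasticsearch.repositories.RepositoryData;
    import org.elasticsearch.snapshots.SnapshotId;

    SnapshotId snap = new SnapshotId("snap-1", UUIDs.randomBase64UUID());
    IndexId index = new IndexId("myindex", UUIDs.randomBase64UUID());
    // addSnapshot leaves RepositoryData.EMPTY untouched and returns a copy that
    // records the snapshot against each of its indices
    RepositoryData withSnap = RepositoryData.EMPTY.addSnapshot(snap, Collections.singletonList(index));
    RepositoryData withoutSnap = withSnap.removeSnapshot(snap); // again a new instance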
assertTrue(newRepoData.getSnapshotIds().contains(newSnapshot)); + for (IndexId indexId : indices) { + Set snapshotIds = newRepoData.getSnapshots(indexId); + assertTrue(snapshotIds.contains(newSnapshot)); + if (newIndices.contains(indexId)) { + assertEquals(snapshotIds.size(), 1); // if it was a new index, only the new snapshot should be in its set + } + } + } + + public void testInitIndices() { + final int numSnapshots = randomIntBetween(1, 30); + final List snapshotIds = new ArrayList<>(numSnapshots); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + RepositoryData repositoryData = new RepositoryData(snapshotIds, Collections.emptyMap()); + // test that initializing indices works + Map> indices = randomIndices(snapshotIds); + RepositoryData newRepoData = repositoryData.initIndices(indices); + assertEquals(repositoryData.getSnapshotIds(), newRepoData.getSnapshotIds()); + for (IndexId indexId : indices.keySet()) { + assertEquals(indices.get(indexId), newRepoData.getSnapshots(indexId)); + } + } + + public void testRemoveSnapshot() { + RepositoryData repositoryData = generateRandomRepoData(); + List snapshotIds = new ArrayList<>(repositoryData.getSnapshotIds()); + assertThat(snapshotIds.size(), greaterThan(0)); + SnapshotId removedSnapshotId = snapshotIds.remove(randomIntBetween(0, snapshotIds.size() - 1)); + RepositoryData newRepositoryData = repositoryData.removeSnapshot(removedSnapshotId); + // make sure the repository data's indices no longer contain the removed snapshot + for (final IndexId indexId : newRepositoryData.getIndices().values()) { + assertFalse(newRepositoryData.getSnapshots(indexId).contains(removedSnapshotId)); + } + } + + public void testResolveIndexId() { + RepositoryData repositoryData = generateRandomRepoData(); + Map indices = repositoryData.getIndices(); + Set indexNames = indices.keySet(); + assertThat(indexNames.size(), greaterThan(0)); + String indexName = indexNames.iterator().next(); + IndexId indexId = indices.get(indexName); + assertEquals(indexId, repositoryData.resolveIndexId(indexName)); + String notInRepoData = randomAsciiOfLength(5); + assertFalse(indexName.contains(notInRepoData)); + assertEquals(new IndexId(notInRepoData, notInRepoData), repositoryData.resolveIndexId(notInRepoData)); + } + + public static RepositoryData generateRandomRepoData() { + return generateRandomRepoData(new ArrayList<>()); + } + + public static RepositoryData generateRandomRepoData(final List origSnapshotIds) { + List snapshotIds = randomSnapshots(origSnapshotIds); + return new RepositoryData(snapshotIds, randomIndices(snapshotIds)); + } + + private static List randomSnapshots(final List origSnapshotIds) { + final int numSnapshots = randomIntBetween(1, 30); + final List snapshotIds = new ArrayList<>(origSnapshotIds); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + return snapshotIds; + } + + private static Map> randomIndices(final List snapshotIds) { + final int totalSnapshots = snapshotIds.size(); + final int numIndices = randomIntBetween(1, 30); + final Map> indices = new HashMap<>(numIndices); + for (int i = 0; i < numIndices; i++) { + final IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + final Set indexSnapshots = new LinkedHashSet<>(); + final int numIndicesForSnapshot = randomIntBetween(1, numIndices); + for (int j = 0; j < numIndicesForSnapshot; j++) { + 
indexSnapshots.add(snapshotIds.get(randomIntBetween(0, totalSnapshots - 1)));
+            }
+            indices.put(indexId, indexSnapshots);
+        }
+        return indices;
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
index c9d8ff81aa2..6c4af1f7737 100644
--- a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
+++ b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
@@ -28,11 +28,11 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -44,7 +44,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;

-import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.blobId;
+import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData;
 import static org.hamcrest.Matchers.equalTo;

 /**
@@ -109,86 +109,56 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
     public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception {
         final BlobStoreRepository repository = setupRepo();

-        // write to and read from a snapshot file with no entries
-        assertThat(repository.getSnapshots().size(), equalTo(0));
-        repository.writeSnapshotsToIndexGen(Collections.emptyList());
+        // write to and read from an index file with no entries
         assertThat(repository.getSnapshots().size(), equalTo(0));
+        final RepositoryData emptyData = RepositoryData.EMPTY;
+        repository.writeIndexGen(emptyData);
+        final RepositoryData readData = repository.getRepositoryData();
+        assertEquals(readData, emptyData);
+        assertEquals(readData.getIndices().size(), 0);
+        assertEquals(readData.getSnapshotIds().size(), 0);

-        // write to and read from a snapshot file with a random number of entries
-        final int numSnapshots = randomIntBetween(1, 1000);
+        // write to and read from an index file with snapshots but no indices
+        final int numSnapshots = randomIntBetween(1, 20);
         final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots);
         for (int i = 0; i < numSnapshots; i++) {
             snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
         }
-        repository.writeSnapshotsToIndexGen(snapshotIds);
-        assertThat(repository.getSnapshots(), equalTo(snapshotIds));
+        RepositoryData repositoryData = new RepositoryData(snapshotIds, Collections.emptyMap());
+        repository.writeIndexGen(repositoryData);
+        assertEquals(repository.getRepositoryData(), repositoryData);
+
+        // write to and read from an index file with random repository data
+        repositoryData = generateRandomRepoData();
+        repository.writeIndexGen(repositoryData);
+        assertThat(repository.getRepositoryData(), equalTo(repositoryData));
     }

     public void testIndexGenerationalFiles() throws Exception {
         final BlobStoreRepository repository = setupRepo();
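Aside: testIndexGenerationalFiles, which starts here, pins down the generational contract behind writeIndexGen: each call persists a new index-N blob and bumps the generation, and getRepositoryData reads back the latest one. A hedged sketch of the round trip (assumed to run inside a test method that declares throws Exception, with repository set up as above):

    RepositoryData repositoryData = generateRandomRepoData();
    repository.writeIndexGen(repositoryData);           // persists generation 0
    assertEquals(repository.getRepositoryData(), repositoryData);
    assertThat(repository.latestIndexBlobId(), equalTo(0L));

    repository.writeIndexGen(generateRandomRepoData()); // persists generation 1
    assertThat(repository.latestIndexBlobId(), equalTo(1L));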
// write to index generational file - final int numSnapshots = randomIntBetween(1, 1000); - final List snapshotIds = new ArrayList<>(numSnapshots); - for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); - } - repository.writeSnapshotsToIndexGen(snapshotIds); - assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + RepositoryData repositoryData = generateRandomRepoData(); + repository.writeIndexGen(repositoryData); + assertThat(repository.getRepositoryData(), equalTo(repositoryData)); assertThat(repository.latestIndexBlobId(), equalTo(0L)); assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(0L)); // adding more and writing to a new index generational file - for (int i = 0; i < 10; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); - } - repository.writeSnapshotsToIndexGen(snapshotIds); - assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + repositoryData = generateRandomRepoData(); + repository.writeIndexGen(repositoryData); + assertEquals(repository.getRepositoryData(), repositoryData); assertThat(repository.latestIndexBlobId(), equalTo(1L)); assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(1L)); - // removing a snapshot adn writing to a new index generational file - snapshotIds.remove(0); - repository.writeSnapshotsToIndexGen(snapshotIds); - assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + // removing a snapshot and writing to a new index generational file + repositoryData = repositoryData.removeSnapshot(repositoryData.getSnapshotIds().get(0)); + repository.writeIndexGen(repositoryData); + assertEquals(repository.getRepositoryData(), repositoryData); assertThat(repository.latestIndexBlobId(), equalTo(2L)); assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(2L)); } - public void testOldIndexFileFormat() throws Exception { - final BlobStoreRepository repository = setupRepo(); - - // write old index file format - final int numOldSnapshots = randomIntBetween(1, 50); - final List snapshotIds = new ArrayList<>(); - for (int i = 0; i < numOldSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID)); - } - writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList())); - assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds))); - - // write to and read from a snapshot file with a random number of new entries added - final int numSnapshots = randomIntBetween(1, 1000); - for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); - } - repository.writeSnapshotsToIndexGen(snapshotIds); - assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds))); - } - - public void testBlobId() { - SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID); - assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name - snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID); - assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name - String uuid = UUIDs.randomBase64UUID(); - snapshotId = new SnapshotId("abc123", uuid); - assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid - uuid = 
UUIDs.randomBase64UUID(); - snapshotId = new SnapshotId("abc-123", uuid); - assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid - } - private BlobStoreRepository setupRepo() { final Client client = client(); final Path location = ESIntegTestCase.randomRepoPath(node().settings()); diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index f932317085b..954de105a4f 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -37,14 +37,16 @@ public class FileScriptTests extends ESTestCase { Path scriptsDir = homeDir.resolve("config").resolve("scripts"); Files.createDirectories(scriptsDir); Path mockscript = scriptsDir.resolve("script1.mockscript"); - Files.write(mockscript, "1".getBytes("UTF-8")); + String scriptSource = "1"; + Files.write(mockscript, scriptSource.getBytes("UTF-8")); settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) .put(settings) .build(); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new MockScriptEngine())); + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap(scriptSource, script -> "1")); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(scriptEngine)); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); return new ScriptService(settings, new Environment(settings), null, scriptEngineRegistry, scriptContextRegistry, scriptSettings); diff --git a/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java b/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java new file mode 100644 index 00000000000..7e57d41acea --- /dev/null +++ b/core/src/test/java/org/elasticsearch/script/IndexLookupIT.java @@ -0,0 +1,1025 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.lookup.IndexField; +import org.elasticsearch.search.lookup.IndexFieldTerm; +import org.elasticsearch.search.lookup.IndexLookup; +import org.elasticsearch.search.lookup.LeafIndexLookup; +import org.elasticsearch.search.lookup.TermPosition; +import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.script.ScriptService.ScriptType; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class IndexLookupIT extends ESIntegTestCase { + + private static final String INCLUDE_ALL = "_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS|_CACHE"; + private static final int ALL_FLAGS = IndexLookup.FLAG_FREQUENCIES + | IndexLookup.FLAG_OFFSETS + | IndexLookup.FLAG_PAYLOADS + | IndexLookup.FLAG_POSITIONS + | IndexLookup.FLAG_CACHE; + + private static final String INCLUDE_ALL_BUT_CACHE = "_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS"; + private static final int ALL_FLAGS_WITHOUT_CACHE = IndexLookup.FLAG_FREQUENCIES + | IndexLookup.FLAG_OFFSETS + | IndexLookup.FLAG_PAYLOADS + | IndexLookup.FLAG_POSITIONS; + + private HashMap> expectedEndOffsetsArray; + private HashMap> expectedPayloadsArray; + private HashMap> expectedPositionsArray; + private HashMap> expectedStartOffsetsArray; + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("term = _index['int_payload_field']['c']; term.tf()", vars -> tf(vars, "int_payload_field", "c")); + scripts.put("term = _index['int_payload_field']['b']; term.tf()", vars -> tf(vars, "int_payload_field", "b")); + + scripts.put("Sum the payloads of [float_payload_field][b]", vars -> payloadSum(vars, "float_payload_field", "b")); + scripts.put("Sum the payloads of [int_payload_field][b]", vars -> payloadSum(vars, "int_payload_field", "b")); + + scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",position]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.position)); + scripts.put("createPositionsArrayScriptIterateTwice[b," + 
INCLUDE_ALL + ",startOffset]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.startOffset)); + scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",endOffset]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.endOffset)); + scripts.put("createPositionsArrayScriptIterateTwice[b," + INCLUDE_ALL + ",payloadAsInt(-1)]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position)); + scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset)); + scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset)); + scripts.put("createPositionsArrayScriptIterateTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]", + vars -> createPositionsArrayScriptIterateTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]", + vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position)); + scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]", + vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset)); + scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]", + vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset)); + scripts.put("createPositionsArrayScriptGetInfoObjectTwice[b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]", + vars -> createPositionsArrayScriptGetInfoObjectTwice(vars, "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS,position]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_POSITIONS, p -> p.position)); + + scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,position]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.position)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,startOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.startOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,endOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.endOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_OFFSETS,payloadAsInt(-1)]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_OFFSETS, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,position]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.position)); + 
scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,startOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.startOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,endOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.endOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_PAYLOADS,payloadAsInt(-1)]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", IndexLookup.FLAG_PAYLOADS, p -> p.payloadAsInt(-1))); + + int posoffpay = IndexLookup.FLAG_POSITIONS|IndexLookup.FLAG_OFFSETS|IndexLookup.FLAG_PAYLOADS; + scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,position]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.position)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,startOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.startOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,endOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.endOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_POSITIONS|_OFFSETS|_PAYLOADS,payloadAsInt(-1)]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", posoffpay, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,position]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.position)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,startOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.startOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,endOffset]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.endOffset)); + scripts.put("createPositionsArrayScript[int_payload_field,b,_FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS,payloadAsInt(-1)]", + vars -> createPositionsArrayScript(vars, "int_payload_field", "b", ALL_FLAGS_WITHOUT_CACHE, p -> p.payloadAsInt(-1))); + + scripts.put("createPositionsArrayScript" + + "[float_payload_field,b," + INCLUDE_ALL + ",payloadAsFloat(-1)]", + vars -> createPositionsArrayScript(vars,"float_payload_field", "b", ALL_FLAGS, p -> p.payloadAsFloat(-1))); + scripts.put("createPositionsArrayScript" + + "[string_payload_field,b," + INCLUDE_ALL + ",payloadAsString()]", + vars -> createPositionsArrayScript(vars,"string_payload_field", "b", ALL_FLAGS, TermPosition::payloadAsString)); + scripts.put("createPositionsArrayScript" + + "[int_payload_field,c," + INCLUDE_ALL + ",payloadAsInt(-1)]", + vars -> createPositionsArrayScript(vars,"int_payload_field", "c", ALL_FLAGS, p -> p.payloadAsInt(-1))); + + // Call with different flags twice, equivalent to: + // term = _index['int_payload_field']['b']; return _index['int_payload_field'].get('b', _POSITIONS).tf(); + scripts.put("Call with different flags twice", vars -> { + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + IndexField indexField = leafIndexLookup.get("int_payload_field"); + + // 1st call + 
indexField.get("b"); + try { + // 2nd call, must throws an exception + return indexField.get("b", IndexLookup.FLAG_POSITIONS).tf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "Call with different flags twice", CustomScriptPlugin.NAME); + } + }); + + // Call with same flags twice: equivalent to: + // term = _index['int_payload_field'].get('b', _POSITIONS | _FREQUENCIES);return _index['int_payload_field']['b'].tf(); + scripts.put("Call with same flags twice", vars -> { + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + IndexField indexField = leafIndexLookup.get("int_payload_field"); + + // 1st call + indexField.get("b", IndexLookup.FLAG_POSITIONS | IndexLookup.FLAG_FREQUENCIES); + try { + // 2nd call, must throws an exception + return indexField.get("b").tf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "Call with same flags twice", CustomScriptPlugin.NAME); + } + }); + + // get the number of all docs + scripts.put("_index.numDocs()", + vars -> ((LeafIndexLookup) vars.get("_index")).numDocs()); + + // get the number of docs with field float_payload_field + scripts.put("_index['float_payload_field'].docCount()", + vars -> indexFieldScript(vars, "float_payload_field", indexField -> { + try { + return indexField.docCount(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "docCount()", CustomScriptPlugin.NAME); + } + })); + + // corner case: what if the field does not exist? + scripts.put("_index['non_existent_field'].docCount()", + vars -> indexFieldScript(vars, "non_existent_field", indexField -> { + try { + return indexField.docCount(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "docCount()", CustomScriptPlugin.NAME); + } + })); + + // get the number of all tokens in all docs + scripts.put("_index['float_payload_field'].sumttf()", + vars -> indexFieldScript(vars, "float_payload_field", indexField -> { + try { + return indexField.sumttf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "sumttf()", CustomScriptPlugin.NAME); + } + })); + + // corner case get the number of all tokens in all docs for non existent + // field + scripts.put("_index['non_existent_field'].sumttf()", + vars -> indexFieldScript(vars, "non_existent_field", indexField -> { + try { + return indexField.sumttf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "sumttf()", CustomScriptPlugin.NAME); + } + })); + + // get the sum of doc freqs in all docs + scripts.put("_index['float_payload_field'].sumdf()", + vars -> indexFieldScript(vars, "float_payload_field", indexField -> { + try { + return indexField.sumdf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "sumdf()", CustomScriptPlugin.NAME); + } + })); + + // get the sum of doc freqs in all docs for non existent field + scripts.put("_index['non_existent_field'].sumdf()", + vars -> indexFieldScript(vars, "non_existent_field", indexField -> { + try { + return indexField.sumdf(); + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "sumdf()", CustomScriptPlugin.NAME); + } + })); + + // check term frequencies for 'a' + scripts.put("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}", + vars -> indexFieldTermScript(vars, "float_payload_field", "a", indexFieldTerm -> { + try { + if (indexFieldTerm != null) 
{ + return indexFieldTerm.tf(); + } + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "term.tf()", CustomScriptPlugin.NAME); + } + return null; + })); + + // check doc frequencies for 'c' + scripts.put("term = _index['float_payload_field']['c']; if (term != null) {term.df()}", + vars -> indexFieldTermScript(vars, "float_payload_field", "c", indexFieldTerm -> { + try { + if (indexFieldTerm != null) { + return indexFieldTerm.df(); + } + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "term.df()", CustomScriptPlugin.NAME); + } + return null; + })); + + // check doc frequencies for term that does not exist + scripts.put("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}", + vars -> indexFieldTermScript(vars, "float_payload_field", "non_existent_term", indexFieldTerm -> { + try { + if (indexFieldTerm != null) { + return indexFieldTerm.df(); + } + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "term.df()", CustomScriptPlugin.NAME); + } + return null; + })); + + // check term frequencies for a field that does not exist + scripts.put("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}", + vars -> indexFieldTermScript(vars, "non_existent_field", "non_existent_term", indexFieldTerm -> { + try { + if (indexFieldTerm != null) { + return indexFieldTerm.tf(); + } + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "term.tf()", CustomScriptPlugin.NAME); + } + return null; + })); + + // check total term frequencies for 'a' + scripts.put("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}", + vars -> indexFieldTermScript(vars, "float_payload_field", "a", indexFieldTerm -> { + try { + if (indexFieldTerm != null) { + return indexFieldTerm.ttf(); + } + } catch (IOException e) { + throw new ScriptException(e.getMessage(), e, emptyList(), "term.ttf()", CustomScriptPlugin.NAME); + } + return null; + })); + + return scripts; + } + + @SuppressWarnings("unchecked") + static Object indexFieldScript(Map vars, String fieldName, Function fn) { + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + return fn.apply(leafIndexLookup.get(fieldName)); + } + + @SuppressWarnings("unchecked") + static Object indexFieldTermScript(Map vars, String fieldName, String term, Function fn) { + return indexFieldScript(vars, fieldName, indexField -> fn.apply(indexField.get(term))); + } + + @SuppressWarnings("unchecked") + static Object tf(Map vars, String fieldName, String term) { + return indexFieldTermScript(vars, fieldName, term, indexFieldTerm -> { + try { + return indexFieldTerm.tf(); + } catch (IOException e) { + throw new RuntimeException("Mocked script error when retrieving TF for [" + fieldName + "][" + term + "]"); + } + }); + } + + // Sum the payloads for a given field term, equivalent to: + // term = _index['float_payload_field'].get('b', _FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS|_CACHE); + // payloadSum=0; + // for (pos in term) { + // payloadSum += pos.payloadAsInt(0) + // }; + // return payloadSum; + @SuppressWarnings("unchecked") + static Object payloadSum(Map vars, String fieldName, String term) { + return indexFieldScript(vars, fieldName, indexField -> { + IndexFieldTerm indexFieldTerm = indexField.get(term, IndexLookup.FLAG_FREQUENCIES + | IndexLookup.FLAG_OFFSETS + | IndexLookup.FLAG_PAYLOADS + | IndexLookup.FLAG_POSITIONS + | IndexLookup.FLAG_CACHE); +
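// The FLAG_* constants are distinct bits, so they combine with bitwise OR: requesting
// _FREQUENCIES|_OFFSETS|_PAYLOADS|_POSITIONS|_CACHE up front lets the loop below read
// every attribute from the one lookup and iterate it safely. A narrower request, in
// sketch form, would be:
//   int flags = IndexLookup.FLAG_POSITIONS | IndexLookup.FLAG_PAYLOADS;
//   IndexFieldTerm term = indexField.get("b", flags);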
int payloadSum = 0; + for (TermPosition position : indexFieldTerm) { + payloadSum += position.payloadAsInt(0); + } + return payloadSum; + }); + } + + @SuppressWarnings("unchecked") + static List createPositionsArrayScriptGetInfoObjectTwice(Map vars, String term, int flags, + Function fn) { + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + IndexField indexField = leafIndexLookup.get("int_payload_field"); + + // 1st call + IndexFieldTerm indexFieldTerm = indexField.get(term, flags); + + List array = new ArrayList<>(); + for (TermPosition position : indexFieldTerm) { + array.add(fn.apply(position)); + } + + // 2nd call + indexField.get(term, flags); + + array = new ArrayList<>(); + for (TermPosition position : indexFieldTerm) { + array.add(fn.apply(position)); + } + + return array; + } + + @SuppressWarnings("unchecked") + static List createPositionsArrayScriptIterateTwice(Map vars, String term, int flags, + Function fn) { + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + IndexField indexField = leafIndexLookup.get("int_payload_field"); + + IndexFieldTerm indexFieldTerm = indexField.get(term, flags); + + // 1st iteration + List array = new ArrayList<>(); + for (TermPosition position : indexFieldTerm) { + array.add(fn.apply(position)); + } + + // 2nd iteration + array = new ArrayList<>(); + for (TermPosition position : indexFieldTerm) { + array.add(fn.apply(position)); + } + + return array; + } + + @SuppressWarnings("unchecked") + static List createPositionsArrayScript(Map vars, String field, String term, int flags, + Function fn) { + + LeafIndexLookup leafIndexLookup = (LeafIndexLookup) vars.get("_index"); + IndexField indexField = leafIndexLookup.get(field); + + IndexFieldTerm indexFieldTerm = indexField.get(term, flags); + List array = new ArrayList<>(); + for (TermPosition position : indexFieldTerm) { + array.add(fn.apply(position)); + } + return array; + } + } + + void initTestData() throws InterruptedException, ExecutionException, IOException { + HashMap> emptyArray = new HashMap<>(); + List empty1 = new ArrayList<>(); + empty1.add(-1); + empty1.add(-1); + emptyArray.put("1", empty1); + List empty2 = new ArrayList<>(); + empty2.add(-1); + empty2.add(-1); + emptyArray.put("2", empty2); + List empty3 = new ArrayList<>(); + empty3.add(-1); + empty3.add(-1); + emptyArray.put("3", empty3); + + expectedPositionsArray = new HashMap<>(); + + List pos1 = new ArrayList<>(); + pos1.add(1); + pos1.add(2); + expectedPositionsArray.put("1", pos1); + List pos2 = new ArrayList<>(); + pos2.add(0); + pos2.add(1); + expectedPositionsArray.put("2", pos2); + List pos3 = new ArrayList<>(); + pos3.add(0); + pos3.add(4); + expectedPositionsArray.put("3", pos3); + + expectedPayloadsArray = new HashMap<>(); + List pay1 = new ArrayList<>(); + pay1.add(2); + pay1.add(3); + expectedPayloadsArray.put("1", pay1); + List pay2 = new ArrayList<>(); + pay2.add(1); + pay2.add(2); + expectedPayloadsArray.put("2", pay2); + List pay3 = new ArrayList<>(); + pay3.add(1); + pay3.add(-1); + expectedPayloadsArray.put("3", pay3); + /* + * "a|1 b|2 b|3 c|4 d " "b|1 b|2 c|3 d|4 a " "b|1 c|2 d|3 a|4 b " + */ + expectedStartOffsetsArray = new HashMap<>(); + List starts1 = new ArrayList<>(); + starts1.add(4); + starts1.add(8); + expectedStartOffsetsArray.put("1", starts1); + List starts2 = new ArrayList<>(); + starts2.add(0); + starts2.add(4); + expectedStartOffsetsArray.put("2", starts2); + List starts3 = new ArrayList<>(); + starts3.add(0); + starts3.add(16); + 
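// Worked example for the offsets above, using doc 3 ("b|1 c|2 d|3 a|4 b "): the
// whitespace tokenizer emits the first "b" token ("b|1") over characters [0,3) and the
// second over [16,17), hence starts3 = [0, 16] and, just below, ends3 = [3, 17]:
//   String doc3 = "b|1 c|2 d|3 a|4 b ";
//   doc3.indexOf("b|1");   // 0  -> first start offset
//   doc3.indexOf("b", 1);  // 16 -> second start offset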
expectedStartOffsetsArray.put("3", starts3); + + expectedEndOffsetsArray = new HashMap<>(); + List ends1 = new ArrayList<>(); + ends1.add(7); + ends1.add(11); + expectedEndOffsetsArray.put("1", ends1); + List ends2 = new ArrayList<>(); + ends2.add(3); + ends2.add(7); + expectedEndOffsetsArray.put("2", ends2); + List ends3 = new ArrayList<>(); + ends3.add(3); + ends3.add(17); + expectedEndOffsetsArray.put("3", ends3); + + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("int_payload_field") + .field("type", "text") + .field("index_options", "offsets") + .field("analyzer", "payload_int") + .endObject() + .endObject() + .endObject() + .endObject(); + + assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( + Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace") + .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int") + .put("index.analysis.filter.delimited_int.delimiter", "|") + .put("index.analysis.filter.delimited_int.encoding", "int") + .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter"))); + indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("int_payload_field", "a|1 b|2 b|3 c|4 d "), client() + .prepareIndex("test", "type1", "2").setSource("int_payload_field", "b|1 b|2 c|3 d|4 a "), + client().prepareIndex("test", "type1", "3").setSource("int_payload_field", "b|1 c|2 d|3 a|4 b ")); + ensureGreen(); + } + + public void testTwoScripts() throws Exception { + initTestData(); + + Script scriptFieldScript = createScript("term = _index['int_payload_field']['c']; term.tf()"); + Script scoreScript = createScript("term = _index['int_payload_field']['b']; term.tf()"); + Map expectedResultsField = new HashMap<>(); + expectedResultsField.put("1", 1); + expectedResultsField.put("2", 1); + expectedResultsField.put("3", 1); + Map expectedResultsScore = new HashMap<>(); + expectedResultsScore.put("1", 2f); + expectedResultsScore.put("2", 2f); + expectedResultsScore.put("3", 2f); + checkOnlyFunctionScore(scoreScript, expectedResultsScore, 3); + checkValueInEachDocWithFunctionScore(scriptFieldScript, expectedResultsField, scoreScript, expectedResultsScore, 3); + + } + + public void testCallWithDifferentFlagsFails() throws Exception { + initTestData(); + final int numPrimaries = getNumShards("test").numPrimaries; + final String expectedError = "You must call get with all required flags! " + + "Instead of _index['int_payload_field'].get('b', _FREQUENCIES) and _index['int_payload_field'].get('b', _POSITIONS)" + + " call _index['int_payload_field'].get('b', _FREQUENCIES | _POSITIONS) once]"; + + // should throw an exception, we cannot call with different flags twice + // if the flags of the second call were not included in the first call. 
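// In sketch form, the illegal sequence that the "Call with different flags twice" mock
// script registered above performs:
//   IndexField field = leafIndexLookup.get("int_payload_field");
//   field.get("b");                              // 1st call, default flags
//   field.get("b", IndexLookup.FLAG_POSITIONS);  // 2nd call asks for more -> exception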
+ Script script = createScript("Call with different flags twice"); + try { + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.matchAllQuery()) + .addScriptField("tvtest", script) + .get(); + + assertThat(numPrimaries, greaterThan(1)); + assertThat(response.getFailedShards(), greaterThanOrEqualTo(1)); + + for (ShardSearchFailure failure : response.getShardFailures()) { + assertThat(failure.reason(), containsString(expectedError)); + } + } catch (SearchPhaseExecutionException e) { + assertThat(numPrimaries, equalTo(1)); + assertThat(e.toString(), containsString(expectedError)); + } + + // Should not throw an exception this way round + script = createScript("Call with same flags twice"); + assertThat(client().prepareSearch("test") + .setQuery(QueryBuilders.matchAllQuery()) + .addScriptField("tvtest", script) + .get().getHits().getTotalHits(), greaterThan(0L)); + } + + private void checkOnlyFunctionScore(Script scoreScript, Map expectedScore, int numExpectedDocs) { + SearchResponse sr = client().prepareSearch("test") + .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))).execute() + .actionGet(); + assertHitCount(sr, numExpectedDocs); + for (SearchHit hit : sr.getHits().getHits()) { + assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(), + Matchers.closeTo(hit.score(), 1.e-4)); + } + } + + public void testDocumentationExample() throws Exception { + initTestData(); + + Script script = createScript("Sum the payloads of [float_payload_field][b]"); + + // non existing field: sum should be 0 + HashMap zeroArray = new HashMap<>(); + zeroArray.put("1", 0); + zeroArray.put("2", 0); + zeroArray.put("3", 0); + checkValueInEachDoc(script, zeroArray, 3); + + script = createScript("Sum the payloads of [int_payload_field][b]"); + + // existing field: sums should be as follows: + zeroArray.put("1", 5); + zeroArray.put("2", 3); + zeroArray.put("3", 1); + checkValueInEachDoc(script, zeroArray, 3); + } + + public void testIteratorAndRecording() throws Exception { + initTestData(); + + // call twice with record: should work as expected + Script script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "position"); + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "startOffset"); + checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "endOffset"); + checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL, "payloadAsInt(-1)"); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); + + // no record and get iterator twice: should fail + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "position"); + checkExceptions(script); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "startOffset"); + checkExceptions(script); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "endOffset"); + checkExceptions(script); + script = createPositionsArrayScriptIterateTwice("b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)"); + checkExceptions(script); + + // no record and get termObject twice and iterate: should fail + script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "position"); + checkExceptions(script); + script = createPositionsArrayScriptGetInfoObjectTwice("b",
INCLUDE_ALL_BUT_CACHE, "startOffset"); + checkExceptions(script); + script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "endOffset"); + checkExceptions(script); + script = createPositionsArrayScriptGetInfoObjectTwice("b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)"); + checkExceptions(script); + + } + + private Script createPositionsArrayScriptGetInfoObjectTwice(String term, String flags, String what) { + return createScript("createPositionsArrayScriptGetInfoObjectTwice[" + term + "," + flags + "," + what + "]"); + } + + private Script createPositionsArrayScriptIterateTwice(String term, String flags, String what) { + return createScript("createPositionsArrayScriptIterateTwice[" + term + "," + flags + "," + what + "]"); + } + + private Script createPositionsArrayScript(String field, String term, String flags, String what) { + return createScript("createPositionsArrayScript[" + field + "," + term + "," + flags + "," + what + "]"); + } + + private Script createPositionsArrayScriptDefaultGet(String field, String term, String what) { + return createScript("createPositionsArrayScriptDefaultGet[" + field + "," + term + "," + what + "]"); + } + + private Script createScript(String script) { + return new Script(script, ScriptType.INLINE, CustomScriptPlugin.NAME, null); + } + + public void testFlags() throws Exception { + initTestData(); + + // check default flag + Script script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "position"); + // there should be no positions + /* TODO: the following tests fail with the new postings enum apis because of a bogus assert in BlockDocsEnum + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "startOffset"); + // there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "endOffset"); + // there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "payloadAsInt(-1)"); + // there should be no payload + checkArrayValsInEachDoc(script, emptyArray, 3); + + // check FLAG_FREQUENCIES flag + script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "position"); + // there should be no positions + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "startOffset"); + // there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "endOffset"); + // there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "payloadAsInt(-1)"); + // there should be no payloads + checkArrayValsInEachDoc(script, emptyArray, 3);*/ + + // check FLAG_POSITIONS flag + script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "position"); + // there should be positions + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + /* TODO: these tests make a bogus assumption that asking for positions will return only positions + script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "startOffset"); + // there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "endOffset"); + // 
there should be no offsets + checkArrayValsInEachDoc(script, emptyArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "payloadAsInt(-1)"); + // there should be no payloads + checkArrayValsInEachDoc(script, emptyArray, 3);*/ + + // check FLAG_OFFSETS flag + script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "position"); + // there should be positions and so forth ... + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "startOffset"); + checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "endOffset"); + checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "payloadAsInt(-1)"); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); + + // check FLAG_PAYLOADS flag + script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "position"); + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "startOffset"); + checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "endOffset"); + checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "payloadAsInt(-1)"); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); + + // check all flags + String allFlags = "_POSITIONS|_OFFSETS|_PAYLOADS"; + script = createPositionsArrayScript("int_payload_field", "b", allFlags, "position"); + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", allFlags, "startOffset"); + checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", allFlags, "endOffset"); + checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", allFlags, "payloadAsInt(-1)"); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); + + // check all flags without record + script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "position"); + checkArrayValsInEachDoc(script, expectedPositionsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "startOffset"); + checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "endOffset"); + checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); + script = createPositionsArrayScript("int_payload_field", "b", INCLUDE_ALL_BUT_CACHE, "payloadAsInt(-1)"); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); + + } + + private void checkArrayValsInEachDoc(Script script, HashMap> expectedArray, int expectedHitSize) { + SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) + .execute().actionGet(); + assertHitCount(sr, expectedHitSize); + int nullCounter = 0; + for (SearchHit hit : sr.getHits().getHits()) { + Object result = hit.getFields().get("tvtest").getValues(); + Object expectedResult = expectedArray.get(hit.getId()); + assertThat("for doc " + hit.getId(), result,
equalTo(expectedResult)); + if (expectedResult != null) { + nullCounter++; + } + } + assertThat(nullCounter, equalTo(expectedArray.size())); + } + + public void testAllExceptPosAndOffset() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") + .startObject("float_payload_field").field("type", "text").field("index_options", "offsets").field("term_vector", "no") + .field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "text") + .field("index_options", "offsets").field("term_vector", "no").field("analyzer", "payload_string").endObject() + .startObject("int_payload_field").field("type", "text").field("index_options", "offsets") + .field("analyzer", "payload_int").endObject().endObject().endObject().endObject(); + assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( + Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.payload_float.tokenizer", "whitespace") + .putArray("index.analysis.analyzer.payload_float.filter", "delimited_float") + .put("index.analysis.filter.delimited_float.delimiter", "|") + .put("index.analysis.filter.delimited_float.encoding", "float") + .put("index.analysis.filter.delimited_float.type", "delimited_payload_filter") + .put("index.analysis.analyzer.payload_string.tokenizer", "whitespace") + .putArray("index.analysis.analyzer.payload_string.filter", "delimited_string") + .put("index.analysis.filter.delimited_string.delimiter", "|") + .put("index.analysis.filter.delimited_string.encoding", "identity") + .put("index.analysis.filter.delimited_string.type", "delimited_payload_filter") + .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace") + .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int") + .put("index.analysis.filter.delimited_int.delimiter", "|") + .put("index.analysis.filter.delimited_int.encoding", "int") + .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter") + .put("index.number_of_shards", 1))); + indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("float_payload_field", "a|1 b|2 a|3 b "), client() + .prepareIndex("test", "type1", "2").setSource("string_payload_field", "a|a b|b a|a b "), + client().prepareIndex("test", "type1", "3").setSource("float_payload_field", "a|4 b|5 a|6 b "), + client().prepareIndex("test", "type1", "4").setSource("string_payload_field", "a|b b|a a|b b "), + client().prepareIndex("test", "type1", "5").setSource("float_payload_field", "c "), + client().prepareIndex("test", "type1", "6").setSource("int_payload_field", "c|1")); + + // get the number of all docs + Script script = createScript("_index.numDocs()"); + checkValueInEachDoc(6, script, 6); + + // get the number of docs with field float_payload_field + script = createScript("_index['float_payload_field'].docCount()"); + checkValueInEachDoc(3, script, 6); + + // corner case: what if the field does not exist? 
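// For a missing field the IndexField behaves like an empty one: the expectations below
// assert that docCount(), sumttf() and sumdf() all return 0 rather than throwing.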
+ script = createScript("_index['non_existent_field'].docCount()"); + checkValueInEachDoc(0, script, 6); + + // get the number of all tokens in all docs + script = createScript("_index['float_payload_field'].sumttf()"); + checkValueInEachDoc(9, script, 6); + + // corner case: get the number of all tokens in all docs for non existent + // field + script = createScript("_index['non_existent_field'].sumttf()"); + checkValueInEachDoc(0, script, 6); + + // get the sum of doc freqs in all docs + script = createScript("_index['float_payload_field'].sumdf()"); + checkValueInEachDoc(5, script, 6); + + // get the sum of doc freqs in all docs for non existent field + script = createScript("_index['non_existent_field'].sumdf()"); + checkValueInEachDoc(0, script, 6); + + // check term frequencies for 'a' + script = createScript("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}"); + Map expectedResults = new HashMap<>(); + expectedResults.put("1", 2); + expectedResults.put("2", 0); + expectedResults.put("3", 2); + expectedResults.put("4", 0); + expectedResults.put("5", 0); + expectedResults.put("6", 0); + checkValueInEachDoc(script, expectedResults, 6); + expectedResults.clear(); + + // check doc frequencies for 'c' + script = createScript("term = _index['float_payload_field']['c']; if (term != null) {term.df()}"); + expectedResults.put("1", 1L); + expectedResults.put("2", 1L); + expectedResults.put("3", 1L); + expectedResults.put("4", 1L); + expectedResults.put("5", 1L); + expectedResults.put("6", 1L); + checkValueInEachDoc(script, expectedResults, 6); + expectedResults.clear(); + + // check doc frequencies for term that does not exist + script = createScript("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}"); + expectedResults.put("1", 0L); + expectedResults.put("2", 0L); + expectedResults.put("3", 0L); + expectedResults.put("4", 0L); + expectedResults.put("5", 0L); + expectedResults.put("6", 0L); + checkValueInEachDoc(script, expectedResults, 6); + expectedResults.clear(); + + // check term frequencies for a field that does not exist + script = createScript("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}"); + expectedResults.put("1", 0); + expectedResults.put("2", 0); + expectedResults.put("3", 0); + expectedResults.put("4", 0); + expectedResults.put("5", 0); + expectedResults.put("6", 0); + checkValueInEachDoc(script, expectedResults, 6); + expectedResults.clear(); + + // check total term frequencies for 'a' + script = createScript("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}"); + expectedResults.put("1", 4L); + expectedResults.put("2", 4L); + expectedResults.put("3", 4L); + expectedResults.put("4", 4L); + expectedResults.put("5", 4L); + expectedResults.put("6", 4L); + checkValueInEachDoc(script, expectedResults, 6); + expectedResults.clear(); + + // check float payload for 'b' + HashMap> expectedPayloadsArray = new HashMap<>(); + script = createPositionsArrayScript("float_payload_field", "b", INCLUDE_ALL, "payloadAsFloat(-1)"); + float missingValue = -1; + List payloadsFor1 = new ArrayList<>(); + payloadsFor1.add(2f); + payloadsFor1.add(missingValue); + expectedPayloadsArray.put("1", payloadsFor1); + List payloadsFor2 = new ArrayList<>(); + payloadsFor2.add(5f); + payloadsFor2.add(missingValue); + expectedPayloadsArray.put("3", payloadsFor2); + expectedPayloadsArray.put("6", new ArrayList<>()); + expectedPayloadsArray.put("5", new ArrayList<>()); +
expectedPayloadsArray.put("4", new ArrayList<>()); + expectedPayloadsArray.put("2", new ArrayList<>()); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); + + // check string payload for 'b' + expectedPayloadsArray.clear(); + payloadsFor1.clear(); + payloadsFor2.clear(); + script = createPositionsArrayScript("string_payload_field", "b", INCLUDE_ALL, "payloadAsString()"); + payloadsFor1.add("b"); + payloadsFor1.add(null); + expectedPayloadsArray.put("2", payloadsFor1); + payloadsFor2.add("a"); + payloadsFor2.add(null); + expectedPayloadsArray.put("4", payloadsFor2); + expectedPayloadsArray.put("6", new ArrayList<>()); + expectedPayloadsArray.put("5", new ArrayList<>()); + expectedPayloadsArray.put("3", new ArrayList<>()); + expectedPayloadsArray.put("1", new ArrayList<>()); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); + + // check int payload for 'c' + expectedPayloadsArray.clear(); + payloadsFor1.clear(); + payloadsFor2.clear(); + script = createPositionsArrayScript("int_payload_field", "c", INCLUDE_ALL, "payloadAsInt(-1)"); + payloadsFor1 = new ArrayList<>(); + payloadsFor1.add(1); + expectedPayloadsArray.put("6", payloadsFor1); + expectedPayloadsArray.put("5", new ArrayList<>()); + expectedPayloadsArray.put("4", new ArrayList<>()); + expectedPayloadsArray.put("3", new ArrayList<>()); + expectedPayloadsArray.put("2", new ArrayList<>()); + expectedPayloadsArray.put("1", new ArrayList<>()); + checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); + + } + + private void checkExceptions(Script script) { + try { + SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) + .execute().actionGet(); + assertThat(sr.getHits().hits().length, equalTo(0)); + ShardSearchFailure[] shardFails = sr.getShardFailures(); + for (ShardSearchFailure fail : shardFails) { + assertThat(fail.reason().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."), + Matchers.greaterThan(-1)); + } + } catch (SearchPhaseExecutionException ex) { + assertThat( + "got " + ex.toString(), + ex.toString().indexOf("Cannot iterate twice! 
If you want to iterate more that once, add _CACHE explicitly."), + Matchers.greaterThan(-1)); + } + } + + private void checkValueInEachDocWithFunctionScore(Script fieldScript, Map expectedFieldVals, Script scoreScript, + Map expectedScore, int numExpectedDocs) { + SearchResponse sr = client().prepareSearch("test") + .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))) + .addScriptField("tvtest", fieldScript).execute().actionGet(); + assertHitCount(sr, numExpectedDocs); + for (SearchHit hit : sr.getHits().getHits()) { + Object result = hit.getFields().get("tvtest").getValues().get(0); + Object expectedResult = expectedFieldVals.get(hit.getId()); + assertThat("for doc " + hit.getId(), result, equalTo(expectedResult)); + assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(), + Matchers.closeTo(hit.score(), 1.e-4)); + } + } + + private void checkValueInEachDoc(Script script, Map expectedResults, int numExpectedDocs) { + SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) + .execute().actionGet(); + assertHitCount(sr, numExpectedDocs); + for (SearchHit hit : sr.getHits().getHits()) { + Object result = hit.getFields().get("tvtest").getValues().get(0); + Object expectedResult = expectedResults.get(hit.getId()); + assertThat("for doc " + hit.getId(), result, equalTo(expectedResult)); + } + } + + private void checkValueInEachDoc(int value, Script script, int numExpectedDocs) { + SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) + .execute().actionGet(); + assertHitCount(sr, numExpectedDocs); + for (SearchHit hit : sr.getHits().getHits()) { + Object result = hit.getFields().get("tvtest").getValues().get(0); + if (result instanceof Integer) { + assertThat(result, equalTo(value)); + } else if (result instanceof Long) { + assertThat(((Long) result).intValue(), equalTo(value)); + } else { + fail(); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index c7ee421e7e0..16a1c20792f 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -43,7 +43,9 @@ public class ScriptContextTests extends ESTestCase { .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false") .put("script.engine." + MockScriptEngine.NAME + ".inline." 
+ PLUGIN_NAME + "_custom_exp_disabled_op", "false") .build(); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new MockScriptEngine())); + + MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1")); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(scriptEngine)); List customContexts = Arrays.asList( new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), diff --git a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java index 6ae607e7b8e..1fc9ed8ff77 100644 --- a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java +++ b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java @@ -86,7 +86,7 @@ public class StoredScriptsIT extends ESIntegTestCase { @Override protected Map, Object>> pluginScripts() { - return Collections.emptyMap(); + return Collections.singletonMap("1", script -> "1"); } } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java similarity index 83% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 9955d244fa5..d3e934d875f 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; @@ -36,12 +37,15 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; +import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; @@ -49,11 +53,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase -public class RangeTests extends ESIntegTestCase { +public class RangeIT extends ESIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final String MULTI_VALUED_FIELD_NAME = "l_values"; @@ -62,7 +63,30 @@ public class RangeTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = super.pluginScripts(); + + scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(SINGLE_VALUED_FIELD_NAME); + return value.getValue(); + }); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(MULTI_VALUED_FIELD_NAME); + return value.getValues(); + }); + + return scripts; + } } @Override @@ -94,10 +118,10 @@ public class RangeTests extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).size(100) .collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6))) + range("range").field(SINGLE_VALUED_FIELD_NAME) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6))) 
.execute().actionGet(); assertSearchResponse(response); @@ -112,7 +136,7 @@ public class RangeTests extends ESIntegTestCase { Range range = bucket.getAggregations().get("range"); List buckets = range.getBuckets(); Range.Bucket rangeBucket = buckets.get(0); - assertThat((String) rangeBucket.getKey(), equalTo("*-3.0")); + assertThat(rangeBucket.getKey(), equalTo("*-3.0")); assertThat(rangeBucket.getKeyAsString(), equalTo("*-3.0")); assertThat(rangeBucket, notNullValue()); assertThat(rangeBucket.getFromAsString(), nullValue()); @@ -125,7 +149,7 @@ public class RangeTests extends ESIntegTestCase { assertThat(rangeBucket.getDocCount(), equalTo(0L)); } rangeBucket = buckets.get(1); - assertThat((String) rangeBucket.getKey(), equalTo("3.0-6.0")); + assertThat(rangeBucket.getKey(), equalTo("3.0-6.0")); assertThat(rangeBucket.getKeyAsString(), equalTo("3.0-6.0")); assertThat(rangeBucket, notNullValue()); assertThat(rangeBucket.getFromAsString(), equalTo("3.0")); @@ -138,7 +162,7 @@ public class RangeTests extends ESIntegTestCase { assertThat(rangeBucket.getDocCount(), equalTo(0L)); } rangeBucket = buckets.get(2); - assertThat((String) rangeBucket.getKey(), equalTo("6.0-*")); + assertThat(rangeBucket.getKey(), equalTo("6.0-*")); assertThat(rangeBucket.getKeyAsString(), equalTo("6.0-*")); assertThat(rangeBucket, notNullValue()); assertThat(rangeBucket.getFromAsString(), equalTo("6.0")); @@ -173,7 +197,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -182,7 +206,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -191,7 +215,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -217,7 +241,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3")); + assertThat(bucket.getKey(), equalTo("*-3")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -226,7 +250,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3-6")); + assertThat(bucket.getKey(), equalTo("3-6")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), 
equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3")); @@ -235,7 +259,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6-*")); + assertThat(bucket.getKey(), equalTo("6-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6")); @@ -263,7 +287,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("r1")); + assertThat(bucket.getKey(), equalTo("r1")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -272,7 +296,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("r2")); + assertThat(bucket.getKey(), equalTo("r2")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -281,7 +305,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("r3")); + assertThat(bucket.getKey(), equalTo("r3")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -313,7 +337,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -322,13 +346,13 @@ public class RangeTests extends ESIntegTestCase { Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getValue(), equalTo(3.0)); // 1 + 2 - assertThat((String) propertiesKeys[0], equalTo("*-3.0")); - assertThat((long) propertiesDocCounts[0], equalTo(2L)); - assertThat((double) propertiesCounts[0], equalTo(3.0)); + assertThat(propertiesKeys[0], equalTo("*-3.0")); + assertThat(propertiesDocCounts[0], equalTo(2L)); + assertThat(propertiesCounts[0], equalTo(3.0)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -337,13 +361,13 @@ public class RangeTests extends ESIntegTestCase { sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getValue(), equalTo(12.0)); // 3 + 4 + 5 - assertThat((String) propertiesKeys[1], equalTo("3.0-6.0")); - assertThat((long) propertiesDocCounts[1], equalTo(3L)); - assertThat((double) propertiesCounts[1], 
equalTo(12.0)); + assertThat(propertiesKeys[1], equalTo("3.0-6.0")); + assertThat(propertiesDocCounts[1], equalTo(3L)); + assertThat(propertiesCounts[1], equalTo(12.0)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -356,17 +380,22 @@ public class RangeTests extends ESIntegTestCase { total += i + 1; } assertThat(sum.getValue(), equalTo((double) total)); - assertThat((String) propertiesKeys[2], equalTo("6.0-*")); - assertThat((long) propertiesDocCounts[2], equalTo(numDocs - 5L)); - assertThat((double) propertiesCounts[2], equalTo((double) total)); + assertThat(propertiesKeys[2], equalTo("6.0-*")); + assertThat(propertiesDocCounts[2], equalTo(numDocs - 5L)); + assertThat(propertiesCounts[2], equalTo((double) total)); } public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client() .prepareSearch("idx") .addAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME).script(new Script("_value + 1")).addUnboundedTo(3).addRange(3, 6) - .addUnboundedFrom(6)).execute().actionGet(); + range("range") + .field(SINGLE_VALUED_FIELD_NAME) + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6)) + .get(); assertSearchResponse(response); @@ -378,7 +407,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -387,7 +416,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -396,7 +425,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -437,7 +466,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -446,7 +475,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - 
assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -455,7 +484,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -480,8 +509,13 @@ public class RangeTests extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation( - range("range").field(MULTI_VALUED_FIELD_NAME).script(new Script("_value + 1")).addUnboundedTo(3).addRange(3, 6) - .addUnboundedFrom(6)).execute().actionGet(); + range("range") + .field(MULTI_VALUED_FIELD_NAME) + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6)) + .get(); assertSearchResponse(response); @@ -494,7 +528,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -503,7 +537,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -512,7 +546,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -538,11 +572,16 @@ public class RangeTests extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { + Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); SearchResponse response = client() .prepareSearch("idx") .addAggregation( - range("range").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).addUnboundedTo(3).addRange(3, 6) - .addUnboundedFrom(6)).execute().actionGet(); + range("range") + .script(script) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6)) + .get(); assertSearchResponse(response); @@ -555,7 +594,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) 
bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -564,7 +603,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -573,7 +612,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -600,7 +639,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*--1.0")); + assertThat(bucket.getKey(), equalTo("*--1.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(-1.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -609,7 +648,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("1000.0-*")); + assertThat(bucket.getKey(), equalTo("1000.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000d)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("1000.0")); @@ -618,11 +657,17 @@ public class RangeTests extends ESIntegTestCase { } public void testScriptMultiValued() throws Exception { + Script script = new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( - range("range").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values")).addUnboundedTo(3).addRange(3, 6) - .addUnboundedFrom(6)).execute().actionGet(); + range("range") + .script(script) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6)) + .get(); assertSearchResponse(response); @@ -635,7 +680,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -644,7 +689,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -653,7 +698,7 @@ public class 
RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -698,7 +743,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -707,7 +752,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -716,7 +761,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -746,7 +791,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-3.0")); + assertThat(bucket.getKey(), equalTo("*-3.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0)); assertThat(bucket.getFromAsString(), nullValue()); @@ -755,7 +800,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -764,7 +809,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("6.0-*")); + assertThat(bucket.getKey(), equalTo("6.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("6.0")); @@ -793,7 +838,7 @@ public class RangeTests extends ESIntegTestCase { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-5.0")); + assertThat(bucket.getKey(), equalTo("*-5.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0)); assertThat(bucket.getFromAsString(), nullValue()); 
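Note the pattern repeated throughout these hunks: every bare new Script(source) becomes the explicit four-argument constructor, so the script is routed to the mock engine instead of the default language. A hedged fragment, assuming the 5.0-era org.elasticsearch.script.Script constructor that this patch uses everywhere:

    import org.elasticsearch.script.Script;
    import org.elasticsearch.script.ScriptService.ScriptType;
    import java.util.Collections;

    // new Script(sourceOrId, type, lang, params)
    //   sourceOrId: inline source (here, a pluginScripts() key) or a stored/file script id
    //   type:       ScriptType.INLINE, STORED or FILE
    //   lang:       CustomScriptPlugin.NAME, selecting the mock engine
    //   params:     script parameters, or null when the script takes none
    Script plain = new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
    Script parameterized = new Script("doc['num1'].value * factor", ScriptType.INLINE,
            CustomScriptPlugin.NAME, Collections.singletonMap("factor", 2.0));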
@@ -802,7 +847,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("3.0-6.0")); + assertThat(bucket.getKey(), equalTo("3.0-6.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0)); assertThat(bucket.getFromAsString(), equalTo("3.0")); @@ -811,7 +856,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("4.0-5.0")); + assertThat(bucket.getKey(), equalTo("4.0-5.0")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0)); assertThat(bucket.getFromAsString(), equalTo("4.0")); @@ -820,7 +865,7 @@ public class RangeTests extends ESIntegTestCase { bucket = buckets.get(3); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("4.0-*")); + assertThat(bucket.getKey(), equalTo("4.0-*")); assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0)); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getFromAsString(), equalTo("4.0")); @@ -831,9 +876,16 @@ public class RangeTests extends ESIntegTestCase { public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0) - .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0))) - .execute().actionGet(); + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(1L) + .minDocCount(0) + .subAggregation( + range("range") + .field(SINGLE_VALUED_FIELD_NAME) + .addRange("0-2", 0.0, 2.0))) + .get(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -843,11 +895,11 @@ public class RangeTests extends ESIntegTestCase { Range range = bucket.getAggregations().get("range"); // TODO: use diamond once JI-9019884 is fixed - List<Range.Bucket> buckets = new ArrayList<Range.Bucket>(range.getBuckets()); + List<Range.Bucket> buckets = new ArrayList<>(range.getBuckets()); assertThat(range, Matchers.notNullValue()); assertThat(range.getName(), equalTo("range")); assertThat(buckets.size(), is(1)); - assertThat((String) buckets.get(0).getKey(), equalTo("0-2")); + assertThat(buckets.get(0).getKey(), equalTo("0-2")); assertThat(((Number) buckets.get(0).getFrom()).doubleValue(), equalTo(0.0)); assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(2.0)); assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));
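Before the next file, a reminder of the scripted_metric phases that the renamed ScriptedMetricIT exercises: an optional init script seeds per-shard state (_agg), a map script runs per document, a combine script folds each shard's state, and a reduce script folds all shards' results (_aggs) into the final value. What the two named mock scripts below compute, written out as plain runnable Java (illustrative only):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ScriptedMetricPhasesSketch {
        // "sum agg values as a new aggregation": per-shard combine, _agg -> [sum]
        static List<Integer> combine(List<Integer> agg) {
            int sum = 0;
            for (int a : agg) {
                sum += a;
            }
            List<Integer> out = new ArrayList<>();
            out.add(sum);
            return out;
        }

        // "sum aggs of agg values as a new aggregation": reduce, _aggs -> [total]
        static List<Integer> reduce(List<List<Integer>> aggs) {
            int sum = 0;
            for (List<Integer> agg : aggs) {
                for (int a : agg) {
                    sum += a;
                }
            }
            List<Integer> out = new ArrayList<>();
            out.add(sum);
            return out;
        }

        public static void main(String[] args) {
            // Two shards whose map phase added vars.multiplier = 3 once per document.
            List<Integer> shard1 = combine(Arrays.asList(3, 3));
            List<Integer> shard2 = combine(Arrays.asList(3));
            System.out.println(reduce(Arrays.asList(shard1, shard2))); // [9]
        }
    }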
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java similarity index 68% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 713ef12e1a7..e1800b2f9f1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -17,18 +17,18 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.global.Global; @@ -39,12 +39,17 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -64,13 +69,130 @@ import static org.hamcrest.Matchers.sameInstance; @ClusterScope(scope = Scope.SUITE) @ESIntegTestCase.SuiteScopeTestCase -public class ScriptedMetricTests extends ESIntegTestCase { +public class ScriptedMetricIT extends ESIntegTestCase { private static long numDocs; @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); + + scripts.put("_agg['count'] = 1", vars -> + aggScript(vars, agg -> ((Map) agg).put("count", 1))); + + scripts.put("_agg.add(1)", vars -> + aggScript(vars, agg -> ((List) agg).add(1))); + + scripts.put("vars.multiplier = 3", vars -> + ((Map) vars.get("vars")).put("multiplier", 3)); + + scripts.put("_agg.add(vars.multiplier)", vars -> + aggScript(vars, agg -> ((List) agg).add(XContentMapValues.extractValue("vars.multiplier", vars)))); + + // Equivalent to: + // + // newaggregation = []; + // sum = 0; + // + // for (a in _agg) { + // sum += a + // }; + // + // newaggregation.add(sum); + // return newaggregation + // + scripts.put("sum agg values as a new aggregation", vars -> { + List newAggregation = new ArrayList(); + List agg = (List) vars.get("_agg"); + + if (agg != null) { + Integer sum = 0; + for (Object a : (List) agg) { + sum += ((Number) a).intValue(); + } + newAggregation.add(sum); + } + return newAggregation; + }); + + // Equivalent to: + // + // newaggregation = []; + // sum = 0; + // + // for (aggregation in _aggs) { + // for (a in aggregation) { + // sum += a + // } + // }; + // + // newaggregation.add(sum); + // return newaggregation + // + scripts.put("sum aggs of agg values as a new aggregation", vars -> { + List newAggregation = new ArrayList(); + Integer sum = 0; + + List aggs = (List)
vars.get("_aggs"); + for (Object aggregation : (List) aggs) { + if (aggregation != null) { + for (Object a : (List) aggregation) { + sum += ((Number) a).intValue(); + } + } + } + newAggregation.add(sum); + return newAggregation; + }); + + // Equivalent to: + // + // newaggregation = []; + // sum = 0; + // + // for (aggregation in _aggs) { + // for (a in aggregation) { + // sum += a + // } + // }; + // + // newaggregation.add(sum * multiplier); + // return newaggregation" + // + scripts.put("multiplied sum aggs of agg values as a new aggregation", vars -> { + Integer multiplier = (Integer) vars.get("multiplier"); + List newAggregation = new ArrayList(); + Integer sum = 0; + + List aggs = (List) vars.get("_aggs"); + for (Object aggregation : (List) aggs) { + if (aggregation != null) { + for (Object a : (List) aggregation) { + sum += ((Number) a).intValue(); + } + } + } + newAggregation.add(sum * multiplier); + return newAggregation; + }); + + return scripts; + } + + @SuppressWarnings("unchecked") + static Object aggScript(Map vars, Consumer fn) { + T agg = (T) vars.get("_agg"); + fn.accept(agg); + return agg; + } } @Override @@ -83,7 +205,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { builders.add(client().prepareIndex("idx", "type", "" + i).setSource( jsonBuilder().startObject().field("value", randomAsciiOfLengthBetween(5, 15)) - .field("l_value", i).endObject())); + .field("l_value", i).endObject())); } indexRandom(true, builders); @@ -102,25 +224,28 @@ public class ScriptedMetricTests extends ESIntegTestCase { jsonBuilder().startObject().field("value", i * 2).endObject())); } + // When using the MockScriptPlugin we can map Stored scripts to inline scripts: + // the id of the stored script is used in test method while the source of the stored script + // must match a predefined script from CustomScriptPlugin.pluginScripts() method assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(GroovyScriptEngineService.NAME) - .setId("initScript_indexed") + .setScriptLang(CustomScriptPlugin.NAME) + .setId("initScript_stored") .setSource(new BytesArray("{\"script\":\"vars.multiplier = 3\"}"))); assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(GroovyScriptEngineService.NAME) - .setId("mapScript_indexed") + .setScriptLang(CustomScriptPlugin.NAME) + .setId("mapScript_stored") .setSource(new BytesArray("{\"script\":\"_agg.add(vars.multiplier)\"}"))); assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(GroovyScriptEngineService.NAME) - .setId("combineScript_indexed") - .setSource(new BytesArray("{\"script\":\"newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation\"}"))); + .setScriptLang(CustomScriptPlugin.NAME) + .setId("combineScript_stored") + .setSource(new BytesArray("{\"script\":\"sum agg values as a new aggregation\"}"))); assertAcked(client().admin().cluster().preparePutStoredScript() - .setScriptLang(GroovyScriptEngineService.NAME) - .setId("reduceScript_indexed") - .setSource(new BytesArray("{\"script\":\"newaggregation = []; sum = 0;for (agg in _aggs) { for (a in agg) { sum += a} }; newaggregation.add(sum); return newaggregation\"}"))); + .setScriptLang(CustomScriptPlugin.NAME) + .setId("reduceScript_stored") + .setSource(new BytesArray("{\"script\":\"sum aggs of agg values as a new aggregation\"}"))); indexRandom(true, builders); ensureSearchable(); @@ -128,16 +253,36 @@ public class ScriptedMetricTests 
@@ -128,16 +253,36 @@ public class ScriptedMetricTests extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - Settings settings = Settings.builder() + Path config = createTempDir().resolve("config"); + Path scripts = config.resolve("scripts"); + + try { + Files.createDirectories(scripts); + + // When using the MockScriptPlugin we can map file scripts to inline scripts: + // the name of the script file is used in the test methods, while the source of the script file + // must match a predefined script from CustomScriptPlugin.pluginScripts() + Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8")); + Files.write(scripts.resolve("map_script.mockscript"), "_agg.add(vars.multiplier)".getBytes("UTF-8")); + Files.write(scripts.resolve("combine_script.mockscript"), "sum agg values as a new aggregation".getBytes("UTF-8")); + Files.write(scripts.resolve("reduce_script.mockscript"), "sum aggs of agg values as a new aggregation".getBytes("UTF-8")); + } catch (IOException e) { + throw new RuntimeException("failed to create scripts", e); + } + + return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/org/elasticsearch/messy/tests/conf")) + .put(Environment.PATH_CONF_SETTING.getKey(), config) .build(); - return settings; } public void testMap() { - SearchResponse response = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted").mapScript(new Script("_agg['count'] = 1"))).execute().actionGet(); + Script mapScript = new Script("_agg['count'] = 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(scriptedMetric("scripted").mapScript(mapScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -159,7 +304,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { if (map.size() == 1) { assertThat(map.get("count"), notNullValue()); assertThat(map.get("count"), instanceOf(Number.class)); - assertThat((Number) map.get("count"), equalTo((Number) 1)); + assertThat(map.get("count"), equalTo((Number) 1)); numShardsRun++; } } @@ -172,8 +317,12 @@ public class ScriptedMetricTests extends ESIntegTestCase { Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); - SearchResponse response = client().prepareSearch("idx").setQuery(matchAllQuery()) - .addAggregation(scriptedMetric("scripted").params(params).mapScript(new Script("_agg.add(1)"))).execute().actionGet(); + Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, params); + + SearchResponse response = client().prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -205,6 +354,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); @@ -213,8 +363,11 @@ public class ScriptedMetricTests extends ESIntegTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - scriptedMetric("scripted").params(params).initScript(new Script("vars.multiplier = 3")) - .mapScript(new
Script("_agg.add(vars.multiplier)"))).execute().actionGet(); + scriptedMetric("scripted") + .params(params) + .initScript(new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .mapScript(new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -246,20 +399,22 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testMapCombineWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - .mapScript(new Script("_agg.add(1)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))) + .mapScript(mapScript) + .combineScript(combineScript)) .execute().actionGet(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -295,22 +450,25 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapCombineWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))) - .execute().actionGet(); + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -345,25 +503,27 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapCombineReduceWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( 
scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))) - .execute().actionGet(); + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -386,9 +546,16 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapCombineReduceGetProperty() throws Exception { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) @@ -397,15 +564,11 @@ public class ScriptedMetricTests extends ESIntegTestCase { .subAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))) - .execute().actionGet(); + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript))) + .get(); assertSearchResponse(searchResponse); assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocs)); @@ -437,24 +600,25 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testMapCombineReduceWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += 
a} }; newaggregation.add(sum); return newaggregation"))) - .execute().actionGet(); + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -476,22 +640,25 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapReduceWithParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))) - .execute().actionGet(); + .initScript(initScript) + .mapScript(mapScript) + .reduceScript(reduceScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -517,17 +684,18 @@ public class ScriptedMetricTests extends ESIntegTestCase { params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))) - .execute().actionGet(); + .mapScript(mapScript) + .reduceScript(reduceScript)) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -549,27 +717,30 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapCombineReduceWithParamsAndReduceParams() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Map reduceParams = new HashMap<>(); reduceParams.put("multiplier", 4); + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("multiplied sum aggs of agg values as a new aggregation", ScriptType.INLINE, + CustomScriptPlugin.NAME, reduceParams); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted") .params(params) - 
.initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum * multiplier); return newaggregation", - ScriptType.INLINE, null, reduceParams))) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) .execute().actionGet(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -589,9 +760,10 @@ public class ScriptedMetricTests extends ESIntegTestCase { assertThat(((Number) object).longValue(), equalTo(numDocs * 12)); } - public void testInitMapCombineReduceWithParamsIndexed() { + public void testInitMapCombineReduceWithParamsStored() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); @@ -600,11 +772,13 @@ public class ScriptedMetricTests extends ESIntegTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - scriptedMetric("scripted").params(params) - .initScript(new Script("initScript_indexed", ScriptType.STORED, null, null)) - .mapScript(new Script("mapScript_indexed", ScriptType.STORED, null, null)) - .combineScript(new Script("combineScript_indexed", ScriptType.STORED, null, null)) - .reduceScript(new Script("reduceScript_indexed", ScriptType.STORED, null, null))).execute().actionGet(); + scriptedMetric("scripted") + .params(params) + .initScript(new Script("initScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) + .mapScript(new Script("mapScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) + .combineScript(new Script("combineScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)) + .reduceScript(new Script("reduceScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -634,10 +808,13 @@ public class ScriptedMetricTests extends ESIntegTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - scriptedMetric("scripted").params(params).initScript(new Script("init_script", ScriptType.FILE, null, null)) - .mapScript(new Script("map_script", ScriptType.FILE, null, null)) - .combineScript(new Script("combine_script", ScriptType.FILE, null, null)) - .reduceScript(new Script("reduce_script", ScriptType.FILE, null, null))).execute().actionGet(); + scriptedMetric("scripted") + .params(params) + .initScript(new Script("init_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) + .mapScript(new Script("map_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) + .combineScript(new Script("combine_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)) + .reduceScript(new Script("reduce_script", ScriptType.FILE, CustomScriptPlugin.NAME, null))) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); @@ -659,10 +836,16 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testInitMapCombineReduceWithParamsAsSubAgg() { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + 
Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .setQuery(matchAllQuery()).setSize(1000) @@ -673,15 +856,11 @@ public class ScriptedMetricTests extends ESIntegTestCase { .subAggregation( scriptedMetric("scripted") .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))) - .execute().actionGet(); + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript))) + .get(); assertSearchResponse(response); assertThat(response.getHits().getTotalHits(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); @@ -716,25 +895,27 @@ public class ScriptedMetricTests extends ESIntegTestCase { public void testEmptyAggregation() throws Exception { Map varsMap = new HashMap<>(); varsMap.put("multiplier", 1); + Map params = new HashMap<>(); params.put("_agg", new ArrayList<>()); params.put("vars", varsMap); + Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) .subAggregation( - scriptedMetric("scripted") - .params(params) - .initScript(new Script("vars.multiplier = 3")) - .mapScript(new Script("_agg.add(vars.multiplier)")) - .combineScript( - new Script( - "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")) - .reduceScript( - new Script( - "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))) - .execute().actionGet(); + scriptedMetric("scripted") + .params(params) + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript))) + .get(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo");
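Next comes TopHitsIT, whose previously empty mock-script registry gains one entry. With MockScriptPlugin, every script a test executes must resolve to a registered function; TopHitsIT only needs a trivial script field, so a single constant suffices. Spelled out (hedged fragment, mirroring the hunk below):

    import java.util.Collections;
    import java.util.Map;
    import java.util.function.Function;

    // The "5" script ignores its variables and always returns "5".
    Map<String, Function<Map<String, Object>, Object>> scripts =
            Collections.singletonMap("5", vars -> "5");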
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index bfd4ed8ada9..e2b91b59fe3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -101,7 +101,7 @@ public class TopHitsIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { - return Collections.emptyMap(); + return Collections.singletonMap("5", script -> "5"); } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index a3f87530970..3a07c98ab3e 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ -108,7 +107,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { try { IndexResponse indexResponse = client().prepareIndex("test", "type", "" + i).setTimeout(TimeValue.timeValueSeconds(1)).setSource("test", English.intToEnglish(i)).get(); - if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) { + if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) { numCreated++; added[i] = true; } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 91c2b71a713..d3e82d1b4da 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -137,7 +137,7 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { added[i] = false; try { IndexResponse indexResponse = client().prepareIndex("test", "type", Integer.toString(i)).setTimeout(TimeValue.timeValueSeconds(1)).setSource("test", English.intToEnglish(i)).get(); - if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) { + if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) { numCreated++; added[i] = true; }
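The two tests above now branch on the renamed response enum: getOperation() == Operation.CREATE becomes getResult() == Result.CREATED. A hedged fragment of the counting pattern both tests use:

    // Count a document as created only when the index response reports CREATED.
    IndexResponse indexResponse = client().prepareIndex("test", "type", "1")
            .setTimeout(TimeValue.timeValueSeconds(1))
            .setSource("test", "one")
            .get();
    if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
        numCreated++;
    }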
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java similarity index 69% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java rename to core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 03b85d57e0e..a9f73935504 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -17,28 +17,30 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.fields; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -49,11 +51,13 @@ import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.function.Function; import static java.util.Collections.singleton; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -72,13 +76,82 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -/** - * - */ -public class SearchFieldsTests extends ESIntegTestCase { +public class SearchFieldsIT extends ESIntegTestCase { + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return pluginList(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); + + scripts.put("doc['num1'].value", vars -> { + Map<String, Object> doc = (Map<String, Object>) vars.get("doc"); + ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); + return num1.getValue(); + }); + + scripts.put("doc['num1'].value * factor", vars -> { + Map<String, Object> doc = (Map<String, Object>) vars.get("doc"); + ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); + Double factor = (Double) vars.get("factor"); + return num1.getValue() * factor; + }); + + scripts.put("doc['date'].date.millis", vars -> { + Map<String, Object> doc = (Map<String, Object>) vars.get("doc"); + ScriptDocValues.Longs date = (ScriptDocValues.Longs) doc.get("date"); + return date.getDate().getMillis(); + }); + + scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1")); + scripts.put("_fields._uid.value", vars -> fieldsScript(vars, "_uid")); + scripts.put("_fields._id.value", vars -> fieldsScript(vars, "_id")); + scripts.put("_fields._type.value", vars -> fieldsScript(vars, "_type")); +
scripts.put("_source.obj1", vars -> sourceScript(vars, "obj1")); + scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test")); + scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test")); + scripts.put("_source.obj2", vars -> sourceScript(vars, "obj2")); + scripts.put("_source.obj2.arr2", vars -> sourceScript(vars, "obj2.arr2")); + scripts.put("_source.arr3", vars -> sourceScript(vars, "arr3")); + + scripts.put("return null", vars -> null); + + scripts.put("doc['l'].values", vars -> docScript(vars, "l")); + scripts.put("doc['ml'].values", vars -> docScript(vars, "ml")); + scripts.put("doc['d'].values", vars -> docScript(vars, "d")); + scripts.put("doc['md'].values", vars -> docScript(vars, "md")); + scripts.put("doc['s'].values", vars -> docScript(vars, "s")); + scripts.put("doc['ms'].values", vars -> docScript(vars, "ms")); + + return scripts; + } + + @SuppressWarnings("unchecked") + static Object fieldsScript(Map vars, String fieldName) { + Map fields = (Map) vars.get("_fields"); + FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName); + return fieldLookup.getValue(); + } + + @SuppressWarnings("unchecked") + static Object sourceScript(Map vars, String path) { + Map source = (Map) vars.get("_source"); + return XContentMapValues.extractValue(path, source); + } + + @SuppressWarnings("unchecked") + static Object docScript(Map vars, String fieldName) { + Map doc = (Map) vars.get("doc"); + ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); + return values.getValues(); + } } public void testStoredFields() throws Exception { @@ -127,7 +200,12 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2").execute().actionGet(); + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("*3") + .addStoredField("field1") + .addStoredField("field2") + .get(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); @@ -156,7 +234,11 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").execute().actionGet(); + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addStoredField("*") + .addStoredField("_source") + .get(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).source(), notNullValue()); @@ -175,25 +257,37 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); client().prepareIndex("test", "type1", "1") - .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject()) + .setSource(jsonBuilder().startObject() + .field("test", "value beck") + .field("num1", 1.0f) + 
.field("date", "1970-01-01T00:00:00") + .endObject()) .execute().actionGet(); client().admin().indices().prepareFlush().execute().actionGet(); client().prepareIndex("test", "type1", "2") - .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).field("date", "1970-01-01T00:00:25").endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject() + .field("test", "value beck") + .field("num1", 2.0f) + .field("date", "1970-01-01T00:00:25") + .endObject()) + .get(); client().admin().indices().prepareFlush().execute().actionGet(); client().prepareIndex("test", "type1", "3") - .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).field("date", "1970-01-01T00:02:00").endObject()) - .execute().actionGet(); + .setSource(jsonBuilder().startObject() + .field("test", "value beck") + .field("num1", 3.0f) + .field("date", "1970-01-01T00:02:00") + .endObject()) + .get(); client().admin().indices().refresh(refreshRequest()).actionGet(); logger.info("running doc['num1'].value"); SearchResponse response = client().prepareSearch() .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value")) - .addScriptField("sNum1_field", new Script("_fields['num1'].value")) - .addScriptField("date1", new Script("doc['date'].date.millis")) + .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("sNum1_field", new Script("_fields['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("date1", new Script("doc['date'].date.millis", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) .execute().actionGet(); assertNoFailures(response); @@ -204,48 +298,48 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0)); - assertThat((Long) response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L)); + assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); + assertThat(response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0)); + assertThat(response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); - assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); - assertThat((Double) response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0)); - assertThat((Long) response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000L)); + assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(1).fields().get("date1").values().get(0), 
equalTo(25000L)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); - assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); - assertThat((Double) response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0)); - assertThat((Long) response.getHits().getAt(2).fields().get("date1").values().get(0), equalTo(120000L)); + assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); + assertThat(response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0)); + assertThat(response.getHits().getAt(2).fields().get("date1").values().get(0), equalTo(120000L)); logger.info("running doc['num1'].value * factor"); Map params = MapBuilder.newMapBuilder().put("factor", 2.0).map(); response = client().prepareSearch() .setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, null, params)) - .execute().actionGet(); + .addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, CustomScriptPlugin.NAME, params)) + .get(); assertThat(response.getHits().totalHits(), equalTo(3L)); assertThat(response.getHits().getAt(0).id(), equalTo("1")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); - assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); + assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); - assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0)); + assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); - assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0)); + assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0)); } public void testUidBasedScriptFields() throws Exception { @@ -260,8 +354,11 @@ public class SearchFieldsTests extends ESIntegTestCase { indexRandom(true, indexRequestBuilders); SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs) - .addScriptField("uid", new Script("_fields._uid.value")).get(); + .setQuery(matchAllQuery()) + .addSort("num1", SortOrder.ASC) + .setSize(numDocs) + .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertNoFailures(response); @@ -271,12 +368,15 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new 
HashSet<>(response.getHits().getAt(i).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("uid"))); - assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); + assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); } response = client().prepareSearch() - .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs) - .addScriptField("id", new Script("_fields._id.value")).get(); + .setQuery(matchAllQuery()) + .addSort("num1", SortOrder.ASC) + .setSize(numDocs) + .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertNoFailures(response); @@ -286,12 +386,15 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("id"))); - assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); + assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); } response = client().prepareSearch() - .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs) - .addScriptField("type", new Script("_fields._type.value")).get(); + .setQuery(matchAllQuery()) + .addSort("num1", SortOrder.ASC) + .setSize(numDocs) + .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertNoFailures(response); @@ -301,13 +404,17 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("type"))); - assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); + assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); } response = client().prepareSearch() - .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs) - .addScriptField("id", new Script("_fields._id.value")).addScriptField("uid", new Script("_fields._uid.value")) - .addScriptField("type", new Script("_fields._type.value")).get(); + .setQuery(matchAllQuery()) + .addSort("num1", SortOrder.ASC) + .setSize(numDocs) + .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertNoFailures(response); @@ -317,9 +424,9 @@ public class SearchFieldsTests extends ESIntegTestCase { Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("uid", "type", "id"))); - assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); - assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); - assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); + 
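+            // each doc i was indexed into type1 with id == i, so uid, type and id are fully determined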
assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); + assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); + assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); } } @@ -335,10 +442,14 @@ public class SearchFieldsTests extends ESIntegTestCase { .execute().actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); - SearchResponse response = client().prepareSearch().setQuery(matchAllQuery()).addScriptField("s_obj1", new Script("_source.obj1")) - .addScriptField("s_obj1_test", new Script("_source.obj1.test")).addScriptField("s_obj2", new Script("_source.obj2")) - .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2")).addScriptField("s_arr3", new Script("_source.arr3")) - .execute().actionGet(); + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("s_obj1", new Script("_source.obj1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("s_obj1_test", new Script("_source.obj1.test", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("s_obj2", new Script("_source.obj2", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addScriptField("s_arr3", new Script("_source.arr3", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0)); @@ -365,12 +476,13 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testScriptFieldsForNullReturn() throws Exception { client().prepareIndex("test", "type1", "1") - .setSource("foo", "bar") - .setRefreshPolicy("true").get(); + .setSource("foo", "bar") + .setRefreshPolicy("true").get(); - SearchResponse response = client().prepareSearch().setQuery(matchAllQuery()) - .addScriptField("test_script_1", new Script("return null")) - .get(); + SearchResponse response = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("test_script_1", new Script("return null", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertNoFailures(response); @@ -401,17 +513,53 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testStoredFieldsWithoutSource() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") - .startObject("byte_field").field("type", "byte").field("store", true).endObject() - .startObject("short_field").field("type", "short").field("store", true).endObject() - .startObject("integer_field").field("type", "integer").field("store", true).endObject() - .startObject("long_field").field("type", "long").field("store", true).endObject() - .startObject("float_field").field("type", "float").field("store", true).endObject() - .startObject("double_field").field("type", "double").field("store", true).endObject() - .startObject("date_field").field("type", "date").field("store", true).endObject() - .startObject("boolean_field").field("type", "boolean").field("store", true).endObject() - .startObject("binary_field").field("type", "binary").field("store", true).endObject() - .endObject().endObject().endObject().string(); + String mapping = XContentFactory.jsonBuilder() + .startObject() + 
.startObject("type1") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("byte_field") + .field("type", "byte") + .field("store", true) + .endObject() + .startObject("short_field") + .field("type", "short") + .field("store", true) + .endObject() + .startObject("integer_field") + .field("type", "integer") + .field("store", true) + .endObject() + .startObject("long_field") + .field("type", "long") + .field("store", true) + .endObject() + .startObject("float_field") + .field("type", "float") + .field("store", true) + .endObject() + .startObject("double_field") + .field("type", "double") + .field("store", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("store", true) + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .field("store", true) + .endObject() + .startObject("binary_field") + .field("type", "binary") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject() + .string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -449,17 +597,17 @@ public class SearchFieldsTests extends ESIntegTestCase { "float_field", "double_field", "date_field", "boolean_field", "binary_field"))); - assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2")); - assertThat(searchResponse.getHits().getAt(0).fields().get("integer_field").value(), equalTo((Object) 3)); - assertThat(searchResponse.getHits().getAt(0).fields().get("long_field").value(), equalTo((Object) 4L)); - assertThat(searchResponse.getHits().getAt(0).fields().get("float_field").value(), equalTo((Object) 5.0f)); - assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d)); + SearchHit searchHit = searchResponse.getHits().getAt(0); + assertThat(searchHit.fields().get("byte_field").value().toString(), equalTo("1")); + assertThat(searchHit.fields().get("short_field").value().toString(), equalTo("2")); + assertThat(searchHit.fields().get("integer_field").value(), equalTo((Object) 3)); + assertThat(searchHit.fields().get("long_field").value(), equalTo((Object) 4L)); + assertThat(searchHit.fields().get("float_field").value(), equalTo((Object) 5.0f)); + assertThat(searchHit.fields().get("double_field").value(), equalTo((Object) 6.0d)); String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)); - assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) dateTime)); - assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE)); - assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8")))); - + assertThat(searchHit.fields().get("date_field").value(), equalTo((Object) dateTime)); + assertThat(searchHit.fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE)); + assertThat(searchHit.fields().get("binary_field").value(), equalTo(new BytesArray("testing text" .getBytes("UTF8")))); } public void testSearchFieldsMetaData() throws Exception { @@ -575,25 +723,57 @@ public class SearchFieldsTests extends ESIntegTestCase { new 
SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).fieldDataField("test_field")).get(); assertHitCount(searchResponse, 1); Map fields = searchResponse.getHits().getHits()[0].getFields(); - assertThat((String)fields.get("test_field").value(), equalTo("foobar")); + assertThat(fields.get("test_field").value(), equalTo("foobar")); } public void testFieldsPulledFromFieldData() throws Exception { createIndex("test"); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") - .startObject("text_field").field("type", "text").field("fielddata", true).endObject() - .startObject("keyword_field").field("type", "keyword").endObject() - .startObject("byte_field").field("type", "byte").endObject() - .startObject("short_field").field("type", "short").endObject() - .startObject("integer_field").field("type", "integer").endObject() - .startObject("long_field").field("type", "long").endObject() - .startObject("float_field").field("type", "float").endObject() - .startObject("double_field").field("type", "double").endObject() - .startObject("date_field").field("type", "date").endObject() - .startObject("boolean_field").field("type", "boolean").endObject() - .startObject("binary_field").field("type", "binary").endObject() - .endObject().endObject().endObject().string(); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("keyword_field") + .field("type", "keyword") + .endObject() + .startObject("byte_field") + .field("type", "byte") + .endObject() + .startObject("short_field") + .field("type", "short") + .endObject() + .startObject("integer_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + .startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .endObject() + .startObject("binary_field") + .field("type", "binary") + .endObject() + .endObject() + .endObject() + .endObject() + .string(); client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet(); @@ -667,7 +847,7 @@ public class SearchFieldsTests extends ESIntegTestCase { ensureSearchable(); SearchRequestBuilder req = client().prepareSearch("index"); for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) { - req.addScriptField(field, new Script("doc['" + field + "'].values")); + req.addScriptField(field, new Script("doc['" + field + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null)); } SearchResponse resp = req.get(); assertSearchResponse(resp); @@ -690,11 +870,11 @@ public class SearchFieldsTests extends ESIntegTestCase { indexRandom(true, client().prepareIndex("test", "my-type1", "1") - .setRouting("1") - .setTimestamp("205097") - .setTTL(10000000000000L) - .setParent("parent_1") - .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); + .setRouting("1") + .setTimestamp("205097") + .setTTL(10000000000000L) + .setParent("parent_1") + .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); 
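+        // a plain stored-field fetch should still work when routing, timestamp, TTL and parent are all set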
 SearchResponse response = client().prepareSearch("test").addStoredField("field1").get();
 assertSearchResponse(response);
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
similarity index 66%
rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java
rename to core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
index 6afa738569c..a5eb37f67b1 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java
@@ -16,26 +16,27 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.messy.tests;
+package org.elasticsearch.search.functionscore;

 import org.apache.lucene.util.ArrayUtil;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
 import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.MockScriptPlugin;
+import org.elasticsearch.script.ScoreAccessor;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService.ScriptType;
-import org.elasticsearch.script.groovy.GroovyPlugin;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.hamcrest.CoreMatchers;

 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.Function;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
@@ -44,6 +45,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
 import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
 import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
 import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
+import static org.elasticsearch.script.MockScriptPlugin.NAME;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.allOf;
@@ -54,11 +56,41 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.nullValue;

-public class RandomScoreFunctionTests extends ESIntegTestCase {
+public class RandomScoreFunctionIT extends ESIntegTestCase {
+
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(GroovyPlugin.class);
+        return pluginList(CustomScriptPlugin.class);
+    }
+
+    public static class CustomScriptPlugin extends MockScriptPlugin {
+
+        @Override
+        @SuppressWarnings("unchecked")
+        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+            Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
+
+            scripts.put("log(doc['index'].value + (factor * _score))",
+                    vars -> scoringScript(vars, ScoreAccessor::doubleValue));
+            scripts.put("log(doc['index'].value + (factor * _score.intValue()))",
+                    vars -> scoringScript(vars, ScoreAccessor::intValue));
+            scripts.put("log(doc['index'].value + (factor * _score.longValue()))",
+                    vars -> scoringScript(vars, ScoreAccessor::longValue));
+            scripts.put("log(doc['index'].value + (factor * _score.floatValue()))",
+                    vars -> scoringScript(vars, ScoreAccessor::floatValue));
+            scripts.put("log(doc['index'].value + (factor * _score.doubleValue()))",
+                    vars -> scoringScript(vars, ScoreAccessor::doubleValue));
+            return scripts;
+        }
+
+        @SuppressWarnings("unchecked")
+        static Double scoringScript(Map<String, Object> vars, Function<ScoreAccessor, Number> scoring) {
+            Map<?, ?> doc = (Map) vars.get("doc");
+            Double index = ((Number) ((ScriptDocValues<?>) doc.get("index")).getValues().get(0)).doubleValue();
+            Double score = scoring.apply((ScoreAccessor) vars.get("_score")).doubleValue();
+            Integer factor = (Integer) vars.get("factor");
+            return Math.log(index + (factor * score));
+        }
     }

     public void testConsistentHitsWithSameSeed() throws Exception {
@@ -86,17 +118,15 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
                     .setPreference(preference)
                     .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
                     .execute().actionGet();
-            assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, CoreMatchers.equalTo(0));
+            assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()),
+                    searchResponse.getShardFailures().length, CoreMatchers.equalTo(0));
             final int hitCount = searchResponse.getHits().getHits().length;
             final SearchHit[] currentHits = searchResponse.getHits().getHits();
-            ArrayUtil.timSort(currentHits, new Comparator<SearchHit>() {
-                @Override
-                public int compare(SearchHit o1, SearchHit o2) {
-                    // for tie-breaking we have to resort here since if the score is
-                    // identical we rely on collection order which might change.
-                    int cmp = Float.compare(o1.getScore(), o2.getScore());
-                    return cmp == 0 ? o1.getId().compareTo(o2.getId()) : cmp;
-                }
+            ArrayUtil.timSort(currentHits, (o1, o2) -> {
+                // for tie-breaking we have to resort here since if the score is
+                // identical we rely on collection order which might change.
+                int cmp = Float.compare(o1.getScore(), o2.getScore());
+                return cmp == 0 ?
o1.getId().compareTo(o2.getId()) : cmp; }); if (i == 0) { assertThat(hits, nullValue()); @@ -128,71 +158,92 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { int docCount = randomIntBetween(100, 200); for (int i = 0; i < docCount; i++) { - client().prepareIndex("test", "type", "" + i).setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) // we add 1 to the index field to make sure that the scripts below never compute log(0) + client().prepareIndex("test", "type", "" + i) + // we add 1 to the index field to make sure that the scripts below never compute log(0) + .setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) .get(); } refresh(); Map params = new HashMap<>(); params.put("factor", randomIntBetween(2, 4)); + // Test for accessing _score + Script script = new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, NAME, params); SearchResponse resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, null, params))) - })).get(); + functionScoreQuery(matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + } + )) + .get(); assertNoFailures(resp); SearchHit firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.intValue() + script = new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, NAME, params); resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, null, params))) - })).get(); + functionScoreQuery(matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + } + )) + .get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.longValue() + script = new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, NAME, params); resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, null, params))) - })).get(); + functionScoreQuery(matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + } + )) + .get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.floatValue() + script = new Script("log(doc['index'].value + (factor * _score.floatValue()))", ScriptType.INLINE, NAME, params); resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.floatValue()))", - ScriptType.INLINE, null, params))) - })).get(); + functionScoreQuery(matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + } + )) + .get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.doubleValue() + script = new Script("log(doc['index'].value + (factor * _score.doubleValue()))", ScriptType.INLINE, NAME, params); resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + functionScoreQuery(matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.doubleValue()))", - ScriptType.INLINE, null, params))) - })).get(); + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + } + )) + .get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -208,9 +259,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { int seed = 12345678; SearchResponse resp = client().prepareSearch("test") - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed))) - .setExplain(true) - .get(); + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed))) + .setExplain(true) + .get(); assertNoFailures(resp); assertEquals(1, resp.getHits().totalHits()); SearchHit firstHit = resp.getHits().getAt(0); @@ -222,8 +273,8 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { ensureGreen(); SearchResponse resp = client().prepareSearch("test") - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(1234))) - .get(); + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(1234))) + .get(); assertNoFailures(resp); assertEquals(0, resp.getHits().totalHits()); } @@ -243,9 +294,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { for (int i = 0; i < iters; ++i) { int seed = randomInt(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed))) - .setSize(docCount) - .execute().actionGet(); + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed))) + .setSize(docCount) + .execute().actionGet(); 
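+            // sanity-check the whole response before looking at individual hit scores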
assertNoFailures(searchResponse); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -264,19 +315,19 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { flushAndRefresh(); assertNoFailures(client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomInt()))) - .execute().actionGet()); + .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomInt()))) + .execute().actionGet()); assertNoFailures(client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomLong()))) - .execute().actionGet()); + .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomLong()))) + .execute().actionGet()); assertNoFailures(client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomRealisticUnicodeOfLengthBetween(10, 20)))) - .execute().actionGet()); + .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + .setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomRealisticUnicodeOfLengthBetween(10, 20)))) + .execute().actionGet()); } public void checkDistribution() throws Exception { @@ -344,5 +395,4 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { logger.info("mean: {}", sum / (double) count); } - } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java similarity index 52% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java rename to core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 0d7dd4e12e2..cb574ce46f3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.geo; import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchResponse; @@ -27,32 +27,71 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.closeTo; -/** - */ -public class GeoDistanceTests extends ESIntegTestCase { +public class GeoDistanceIT extends ESIntegTestCase { + + private static final double source_lat = 32.798; + private static final double source_long = -117.151; + private static final double target_lat = 32.81; + private static final double target_long = -117.21; + @Override protected Collection> nodePlugins() { - return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); + return pluginList(CustomScriptPlugin.class, InternalSettingsPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("arcDistance", vars -> distanceScript(vars, + location -> location.arcDistance(target_lat, target_long))); + scripts.put("distance", vars -> distanceScript(vars, + location -> location.distance(target_lat, target_long))); + scripts.put("arcDistanceInKm", vars -> distanceScript(vars, + location -> location.arcDistanceInKm(target_lat, target_long))); + scripts.put("distanceInKm", vars -> distanceScript(vars, + location -> location.distanceInKm(target_lat, target_long))); + scripts.put("arcDistanceInKm(lat, lon + 360)", vars -> distanceScript(vars, + location -> location.arcDistanceInKm(target_lat, target_long + 360))); + scripts.put("arcDistanceInKm(lat + 360, lon)", vars -> distanceScript(vars, + location -> location.arcDistanceInKm(target_lat + 360, target_long))); + scripts.put("arcDistanceInMiles", vars -> distanceScript(vars, + location -> location.arcDistanceInMiles(target_lat, target_long))); + scripts.put("distanceInMiles", vars -> distanceScript(vars, + location -> location.distanceInMiles(target_lat, target_long))); + + return scripts; + } + + @SuppressWarnings("unchecked") + static Double distanceScript(Map vars, Function distance) { + Map doc = (Map) vars.get("doc"); + return distance.apply((ScriptDocValues.GeoPoints) doc.get("location")); + } } public void testDistanceScript() throws Exception { - double source_lat = 32.798; - double source_long = -117.151; - double target_lat = 32.81; - double target_long = -117.21; Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -65,69 +104,78 @@ public class GeoDistanceTests extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() - .field("name", "TestPosition") - .startObject("location").field("lat", source_lat).field("lon", source_long).endObject() - .endObject()).execute().actionGet(); + client().prepareIndex("test", "type1", "1") + .setSource(jsonBuilder().startObject() + .field("name", "TestPosition") + .startObject("location") + .field("lat", source_lat) + .field("lon", source_long) + .endObject() + .endObject()) + .get(); refresh(); + // Test doc['location'].arcDistance(lat, lon) SearchResponse searchResponse1 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].arcDistance(" + target_lat + "," + target_long + ")")).execute() - .actionGet(); + .addScriptField("distance", new Script("arcDistance", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); + // Test doc['location'].distance(lat, lon) SearchResponse searchResponse2 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute() - .actionGet(); + .addScriptField("distance", new Script("distance", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); + // Test doc['location'].arcDistanceInKm(lat, lon) SearchResponse searchResponse3 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")")) - .execute().actionGet(); + .addScriptField("distance", new Script("arcDistanceInKm", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance3, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); + // Test doc['location'].distanceInKm(lat, lon) SearchResponse searchResponse4 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute() - .actionGet(); + .addScriptField("distance", new Script("distanceInKm", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance4, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse5 = client() - .prepareSearch() - .addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")")) 
- .execute().actionGet(); + // Test doc['location'].arcDistanceInKm(lat, lon + 360) + SearchResponse searchResponse5 = client().prepareSearch().addStoredField("_source") + .addScriptField("distance", new Script("arcDistanceInKm(lat, lon + 360)", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance5, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse6 = client() - .prepareSearch() - .addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")")) - .execute().actionGet(); + // Test doc['location'].arcDistanceInKm(lat + 360, lon) + SearchResponse searchResponse6 = client().prepareSearch().addStoredField("_source") + .addScriptField("distance", new Script("arcDistanceInKm(lat + 360, lon)", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance6, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); + // Test doc['location'].arcDistanceInMiles(lat, lon) SearchResponse searchResponse7 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")")) - .execute().actionGet(); + .addScriptField("distance", new Script("arcDistanceInMiles", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance7, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d)); + // Test doc['location'].distanceInMiles(lat, lon) SearchResponse searchResponse8 = client().prepareSearch().addStoredField("_source") - .addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")")) - .execute().actionGet(); + .addScriptField("distance", new Script("distanceInMiles", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance8, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d)); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java similarity index 91% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java rename to core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index 73943dfaaba..4294547e7fc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoShapeIntegrationTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.geo; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -26,25 +26,13 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.test.ESIntegTestCase; -import java.util.Collection; -import java.util.Collections; - import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -/** - */ -public class GeoShapeIntegrationTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); - } +public class GeoShapeIntegrationIT extends ESIntegTestCase { /** * Test that orientation parameter correctly persists across cluster restart diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 780e8cbcad4..974d0ade282 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -83,7 +83,7 @@ public class InnerHitsIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override protected Map, Object>> pluginScripts() { - return Collections.emptyMap(); + return Collections.singletonMap("5", script -> "5"); } } diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 800b28d59df..ec18e528a40 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -149,7 +149,7 @@ public class SimpleNestedIT extends ESIntegTestCase { // check delete, so all is gone... DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").execute().actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); // flush, so we fetch it from the index (as see that we filter nested docs) flush(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java similarity index 68% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java rename to core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index e1620c8f619..6422bf7a134 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java +++ b/core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -17,15 +17,16 @@ * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.scriptfilter; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -34,19 +35,47 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.hamcrest.Matchers.equalTo; -/** - * - */ @ESIntegTestCase.ClusterScope(scope= ESIntegTestCase.Scope.SUITE) -public class ScriptQuerySearchTests extends ESIntegTestCase { +public class ScriptQuerySearchIT extends ESIntegTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("doc['num1'].value", vars -> { + Map doc = (Map) vars.get("doc"); + return doc.get("num1"); + }); + + scripts.put("doc['num1'].value > 1", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); + return num1.getValue() > 1; + }); + + scripts.put("doc['num1'].value > param1", vars -> { + Integer param1 = (Integer) vars.get("param1"); + + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1"); + return num1.getValue() > param1; + }); + + return scripts; + } } @Override @@ -62,21 +91,23 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { createIndex("test"); client().prepareIndex("test", "type1", "1") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject()) - .execute().actionGet(); + .get(); flush(); client().prepareIndex("test", "type1", "2") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject()) - .execute().actionGet(); + .get(); flush(); client().prepareIndex("test", "type1", "3") .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject()) - .execute().actionGet(); + .get(); refresh(); logger.info("running doc['num1'].value > 1"); SearchResponse response = client().prepareSearch() - .setQuery(scriptQuery(new Script("doc['num1'].value > 1"))).addSort("num1", SortOrder.ASC) - .addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet(); + .setQuery(scriptQuery(new Script("doc['num1'].value > 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))) + .addSort("num1", SortOrder.ASC) + .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertThat(response.getHits().totalHits(), equalTo(2L)); assertThat(response.getHits().getAt(0).id(), equalTo("2")); @@ -90,8 +121,10 @@ public class ScriptQuerySearchTests extends 
ESIntegTestCase { logger.info("running doc['num1'].value > param1"); response = client() .prepareSearch() - .setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, null, params))) - .addSort("num1", SortOrder.ASC).addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet(); + .setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params))) + .addSort("num1", SortOrder.ASC) + .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertThat(response.getHits().totalHits(), equalTo(1L)); assertThat(response.getHits().getAt(0).id(), equalTo("3")); @@ -102,9 +135,10 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { logger.info("running doc['num1'].value > param1"); response = client() .prepareSearch() - .setQuery( - scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, null, params))) - .addSort("num1", SortOrder.ASC).addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet(); + .setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params))) + .addSort("num1", SortOrder.ASC) + .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .get(); assertThat(response.getHits().totalHits(), equalTo(3L)); assertThat(response.getHits().getAt(0).id(), equalTo("1")); diff --git a/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java new file mode 100644 index 00000000000..53cf4d07e99 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -0,0 +1,476 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.sort; + + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.function.Function; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.script.ScriptService.ScriptType; +import static org.elasticsearch.search.sort.SortBuilders.scriptSort; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class SimpleSortIT extends ESIntegTestCase { + + private static final String DOUBLE_APOSTROPHE = "\u0027\u0027"; + + @Override + protected Collection> nodePlugins() { + return pluginList(CustomScriptPlugin.class, InternalSettingsPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("doc['str_value'].value", vars -> { + Map doc = (Map) vars.get("doc"); + return ((ScriptDocValues.Strings) doc.get("str_value")).getValue(); + }); + + scripts.put("doc['id'].value", vars -> { + Map doc = (Map) vars.get("doc"); + return ((ScriptDocValues.Strings) doc.get("id")).getValue(); + }); + + scripts.put("doc['id'].values[0]", vars -> { + Map doc = (Map) vars.get("doc"); + return ((ScriptDocValues.Strings) doc.get("id")).getValues().get(0); + }); + + scripts.put("get min long", vars -> getMinValueScript(vars, Long.MAX_VALUE, "lvalue", l -> (Long) l)); + scripts.put("get min double", vars -> getMinValueScript(vars, Double.MAX_VALUE, "dvalue", d -> (Double) d)); + scripts.put("get min string", vars -> getMinValueScript(vars, Integer.MAX_VALUE, "svalue", s -> Integer.parseInt((String) s))); + scripts.put("get min geopoint lon", vars -> getMinValueScript(vars, Double.MAX_VALUE, "gvalue", g -> ((GeoPoint) g).getLon())); + + scripts.put(DOUBLE_APOSTROPHE, vars -> DOUBLE_APOSTROPHE); + + return scripts; + } + + /** + * Return the minimal value from a set of values. 
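+         * The converter argument turns each raw doc value into a comparable value before the comparison.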
+ */ + @SuppressWarnings("unchecked") + static > T getMinValueScript(Map vars, T initialValue, String fieldName, + Function converter) { + T retval = initialValue; + Map doc = (Map) vars.get("doc"); + ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); + for (Object v : values.getValues()) { + T value = converter.apply(v); + retval = (value.compareTo(retval) < 0) ? value : retval; + } + return retval; + } + } + + public void testSimpleSorts() throws Exception { + Random random = random(); + assertAcked(prepareCreate("test") + .addMapping("type1", jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("str_value") + .field("type", "keyword") + .endObject() + .startObject("boolean_value") + .field("type", "boolean") + .endObject() + .startObject("byte_value") + .field("type", "byte") + .endObject() + .startObject("short_value") + .field("type", "short") + .endObject() + .startObject("integer_value") + .field("type", "integer") + .endObject() + .startObject("long_value") + .field("type", "long") + .endObject() + .startObject("float_value") + .field("type", "float") + .endObject() + .startObject("double_value") + .field("type", "double") + .endObject() + .endObject() + .endObject() + .endObject())); + ensureGreen(); + List builders = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + builders.add(client().prepareIndex("test", "type1", Integer.toString(i)) + .setSource(jsonBuilder() + .startObject() + .field("str_value", new String(new char[]{(char) (97 + i), (char) (97 + i)})) + .field("boolean_value", true) + .field("byte_value", i) + .field("short_value", i) + .field("integer_value", i) + .field("long_value", i) + .field("float_value", 0.1 * i) + .field("double_value", 0.1 * i) + .endObject() + )); + } + Collections.shuffle(builders, random); + for (IndexRequestBuilder builder : builders) { + builder.execute().actionGet(); + if (random.nextBoolean()) { + if (random.nextInt(5) != 0) { + refresh(); + } else { + client().admin().indices().prepareFlush().get(); + } + } + } + refresh(); + + // STRING script + int size = 1 + random.nextInt(10); + + Script script = new Script("doc['str_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort(new ScriptSortBuilder(script, ScriptSortType.STRING)) + .get(); + + assertHitCount(searchResponse, 10); + assertThat(searchResponse.getHits().hits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat(searchHit.id(), equalTo(Integer.toString(i))); + + String expected = new String(new char[]{(char) (97 + i), (char) (97 + i)}); + assertThat(searchHit.sortValues()[0].toString(), equalTo(expected)); + } + + size = 1 + random.nextInt(10); + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("str_value", SortOrder.DESC) + .get(); + + assertHitCount(searchResponse, 10); + assertThat(searchResponse.getHits().hits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat(searchHit.id(), equalTo(Integer.toString(9 - i))); + + String expected = new String(new char[]{(char) (97 + (9 - i)), (char) (97 + (9 - i))}); + assertThat(searchHit.sortValues()[0].toString(), equalTo(expected)); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + 
assertNoFailures(searchResponse); + } + + public void testSortMinValueScript() throws IOException { + String mapping = jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("lvalue") + .field("type", "long") + .endObject() + .startObject("dvalue") + .field("type", "double") + .endObject() + .startObject("svalue") + .field("type", "keyword") + .endObject() + .startObject("gvalue") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .endObject().string(); + + assertAcked(prepareCreate("test").addMapping("type1", mapping)); + ensureGreen(); + + for (int i = 0; i < 10; i++) { + client().prepareIndex("test", "type1", "" + i) + .setSource(jsonBuilder() + .startObject() + .field("ord", i) + .field("svalue", new String[]{"" + i, "" + (i + 1), "" + (i + 2)}) + .field("lvalue", new long[]{i, i + 1, i + 2}) + .field("dvalue", new double[]{i, i + 1, i + 2}) + .startObject("gvalue") + .field("lat", (double) i + 1) + .field("lon", (double) i) + .endObject() + .endObject()) + .get(); + } + + for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields + client().prepareIndex("test", "type1", "" + i) + .setSource(jsonBuilder() + .startObject() + .field("ord", i) + .endObject()) + .get(); + } + client().admin().indices().prepareRefresh("test").get(); + + // test the long values + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("min", new Script("get min long", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10) + .get(); + + assertNoFailures(searchResponse); + + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo((long) i)); + } + + // test the double values + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("min", new Script("get min double", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10) + .get(); + + assertNoFailures(searchResponse); + + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo((double) i)); + } + + // test the string values + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("min", new Script("get min string", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10) + .get(); + + assertNoFailures(searchResponse); + + assertHitCount(searchResponse, 20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo(i)); + } + + // test the geopoint values + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("min", new Script("get min geopoint lon", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) + .setSize(10) + .get(); + + assertNoFailures(searchResponse); + + assertHitCount(searchResponse, 
20L); + for (int i = 0; i < 10; i++) { + SearchHit searchHit = searchResponse.getHits().getAt(i); + assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), closeTo(i, GeoUtils.TOLERANCE)); + } + } + + public void testDocumentsWithNullValue() throws Exception { + // TODO: sort shouldn't fail when sort field is mapped dynamically + // We have to specify mapping explicitly because by the time search is performed dynamic mapping might not + // be propagated to all nodes yet and sort operation fail when the sort field is not defined + String mapping = jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("id") + .field("type", "keyword") + .endObject() + .startObject("svalue") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject().string(); + assertAcked(prepareCreate("test").addMapping("type1", mapping)); + ensureGreen(); + + client().prepareIndex("test", "type1") + .setSource(jsonBuilder().startObject() + .field("id", "1") + .field("svalue", "aaa") + .endObject()) + .get(); + + client().prepareIndex("test", "type1") + .setSource(jsonBuilder().startObject() + .field("id", "2") + .nullField("svalue") + .endObject()) + .get(); + + client().prepareIndex("test", "type1") + .setSource(jsonBuilder().startObject() + .field("id", "3") + .field("svalue", "bbb") + .endObject()) + .get(); + + flush(); + refresh(); + + Script scripField = new Script("doc['id'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("id", scripField) + .addSort("svalue", SortOrder.ASC) + .get(); + + assertNoFailures(searchResponse); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("id", new Script("doc['id'].values[0]", ScriptType.INLINE, CustomScriptPlugin.NAME, null)) + .addSort("svalue", SortOrder.ASC) + .get(); + + assertNoFailures(searchResponse); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); + + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addScriptField("id", scripField) + .addSort("svalue", SortOrder.DESC) + .get(); + + if (searchResponse.getFailedShards() > 0) { + logger.warn("Failed shards:"); + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + logger.warn("-> {}", shardSearchFailure); + } + } + assertThat(searchResponse.getFailedShards(), equalTo(0)); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); + assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("3")); + assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("1")); + assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); + + // a query with docs just with null values + searchResponse = client().prepareSearch() + .setQuery(termQuery("id", "2")) + .addScriptField("id", 
scripField) + .addSort("svalue", SortOrder.DESC) + .get(); + + if (searchResponse.getFailedShards() > 0) { + logger.warn("Failed shards:"); + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + logger.warn("-> {}", shardSearchFailure); + } + } + assertThat(searchResponse.getFailedShards(), equalTo(0)); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); + assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("2")); + } + + public void test2920() throws IOException { + assertAcked(prepareCreate("test") + .addMapping("test", jsonBuilder() + .startObject() + .startObject("test") + .startObject("properties") + .startObject("value") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject())); + ensureGreen(); + for (int i = 0; i < 10; i++) { + client().prepareIndex("test", "test", Integer.toString(i)) + .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()).get(); + } + refresh(); + + Script sortScript = new Script("\u0027\u0027", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .addSort(scriptSort(sortScript, ScriptSortType.STRING)) + .setSize(10) + .get(); + assertNoFailures(searchResponse); + } +} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java similarity index 93% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java rename to core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java index 52b2f5af797..3162e94f3e2 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java @@ -17,7 +17,7 @@ * under the License. 
 */

-package org.elasticsearch.messy.tests;
+package org.elasticsearch.search.stats;

 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -32,19 +32,22 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.search.stats.SearchStats.Stats;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.groovy.GroovyPlugin;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
+import java.util.function.Function;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.script.ScriptService.ScriptType;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -56,13 +59,23 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

-/**
- */
 @ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SearchStatsTests extends ESIntegTestCase {
+public class SearchStatsIT extends ESIntegTestCase {
+
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(GroovyPlugin.class);
+        return Collections.singleton(CustomScriptPlugin.class);
+    }
+
+    public static class CustomScriptPlugin extends MockScriptPlugin {
+
+        @Override
+        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+            return Collections.singletonMap("_source.field", vars -> {
+                Map<?, ?> src = (Map) vars.get("_source");
+                return src.get("field");
+            });
+        }
     }

     @Override
@@ -108,7 +121,7 @@ public class SearchStatsTests extends ESIntegTestCase {
         SearchResponse searchResponse = internalCluster().coordOnlyNodeClient().prepareSearch()
                 .setQuery(QueryBuilders.termQuery("field", "value")).setStats("group1", "group2")
                 .highlighter(new HighlightBuilder().field("field"))
-                .addScriptField("scrip1", new Script("_source.field"))
+                .addScriptField("script1", new Script("_source.field", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                 .setSize(100)
                 .execute().actionGet();
         assertHitCount(searchResponse, docsTest1 + docsTest2);
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 313bf065b0a..27d8dad2943 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -61,7 +61,9 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.indices.InvalidIndexNameException;
+import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
+import 
org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -884,7 +886,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); logger.info("--> delete index metadata and shard metadata"); - Path metadata = repo.resolve("meta-test-snap-1.dat"); + Path metadata = repo.resolve("meta-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat"); Files.delete(metadata); logger.info("--> delete snapshot"); @@ -917,7 +919,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); logger.info("--> truncate snapshot file to make it unreadable"); - Path snapshotPath = repo.resolve("snap-test-snap-1-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat"); + Path snapshotPath = repo.resolve("snap-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat"); try(SeekableByteChannel outChan = Files.newByteChannel(snapshotPath, StandardOpenOption.WRITE)) { outChan.truncate(randomInt(10)); } @@ -2017,6 +2019,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); logger.info("--> emulate an orphan snapshot"); + RepositoriesService repositoriesService = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName()); + final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData(); + final IndexId indexId = repositoryData.resolveIndexId(idxName); clusterService.submitStateUpdateTask("orphan snapshot test", new ClusterStateUpdateTask() { @@ -2033,7 +2038,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas true, false, State.ABORTED, - Collections.singletonList(idxName), + Collections.singletonList(indexId), System.currentTimeMillis(), shards.build())); return ClusterState.builder(currentState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(Collections.unmodifiableList(entries))).build(); @@ -2189,7 +2194,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); logger.info("--> truncate snapshot file to make it unreadable"); - Path snapshotPath = repo.resolve("snap-test-snap-2-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat"); + Path snapshotPath = repo.resolve("snap-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat"); try(SeekableByteChannel outChan = Files.newByteChannel(snapshotPath, StandardOpenOption.WRITE)) { outChan.truncate(randomInt(10)); } diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index d17f0ea82c9..60c5e014828 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ 
-48,6 +48,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.plugins.RepositoryPlugin;
 import org.elasticsearch.repositories.Repository;
+import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.snapshots.SnapshotId;
@@ -112,8 +113,8 @@ public class MockRepository extends FsRepository {
     }

     @Override
-    public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData clusterMetadata) {
-        if (blockOnInitialization ) {
+    public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetadata) {
+        if (blockOnInitialization) {
             blockExecution();
         }
         super.initializeSnapshot(snapshotId, indices, clusterMetadata);
diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
index a36cd96cee2..5e614d244c4 100644
--- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
+++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
@@ -106,14 +106,14 @@ public class SimpleTTLIT extends ESIntegTestCase {
         long now = System.currentTimeMillis();
         IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
                 .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type1", "with_routing").setSource("field1", "value1")
                 .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRouting("routing").setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type1", "no_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type2", "default_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         // realtime get check
         long currentTime = System.currentTimeMillis();
@@ -259,7 +259,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
         long thirdTtl = aLongTime * 1;
         IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
                 .setTTL(firstTtl).setRefreshPolicy(IMMEDIATE).get();
-        assertTrue(indexResponse.getOperation() == DocWriteResponse.Operation.CREATE);
+        assertTrue(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
         assertThat(getTtl("type1", 1), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl)));

         // Updating with the default detect_noop without a change to the document doesn't change the ttl.
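The test updates in this patch all follow the same migration pattern for the `Operation` to `Result` rename. A minimal sketch of what calling code looks like after the rename, using only the API this diff itself introduces (`client` is assumed to be an already-initialized 5.0-era `org.elasticsearch.client.Client`; the index, type, and id are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;

public final class ResultMigrationSketch {

    static void checkResults(Client client) {
        IndexResponse indexResponse = client.prepareIndex("test", "type1", "1")
                .setSource("field1", "value1")
                .get();
        // was: indexResponse.getOperation() == DocWriteResponse.Operation.CREATE
        boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;

        DeleteResponse deleteResponse = client.prepareDelete("test", "type1", "1").get();
        // was: getOperation() == Operation.DELETE; a delete that matches no live
        // document now reports NOT_FOUND instead of a generic NOOP
        boolean found = deleteResponse.getResult() == DocWriteResponse.Result.DELETED;
    }
}
--------------------------------------------------

The new `NOT_FOUND` value replaces the old practice of reporting `NOOP` for deletes that matched nothing, which is exactly what the SimpleVersioningIT changes below assert.
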
diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 53fda83146d..a71bd466ad8 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -371,7 +371,7 @@ public class UpdateIT extends ESIntegTestCase { .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)) .execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -383,7 +383,7 @@ public class UpdateIT extends ESIntegTestCase { .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject()) .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)) .execute().actionGet(); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -412,7 +412,7 @@ public class UpdateIT extends ESIntegTestCase { .setScriptedUpsert(true) .setScript(new Script("", ScriptService.ScriptType.INLINE, "scripted_upsert", params)) .execute().actionGet(); - assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -426,7 +426,7 @@ public class UpdateIT extends ESIntegTestCase { .setScriptedUpsert(true) .setScript(new Script("", ScriptService.ScriptType.INLINE, "scripted_upsert", params)) .execute().actionGet(); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -582,7 +582,7 @@ public class UpdateIT extends ESIntegTestCase { UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(2L)); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -595,7 +595,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", params)).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); - assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -607,7 +607,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", 
Collections.singletonMap("op", "none")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(3L)); - assertEquals(DocWriteResponse.Operation.NOOP, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { @@ -619,7 +619,7 @@ public class UpdateIT extends ESIntegTestCase { updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("op", "delete")))).execute().actionGet(); assertThat(updateResponse.getVersion(), equalTo(4L)); - assertEquals(DocWriteResponse.Operation.DELETE, updateResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult()); assertThat(updateResponse.getIndex(), equalTo("test")); for (int i = 0; i < 5; i++) { diff --git a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 9964551238a..67e7d528e59 100644 --- a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -59,7 +59,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // Note - external version doesn't throw version conflicts on deletes of non existent records. This is different from internal versioning DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); // this should conflict with the delete command transaction which told us that the object was deleted at version 17. assertThrows( @@ -98,7 +98,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // deleting with a lower version works. long v = randomIntBetween(12, 14); DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(v).setVersionType(VersionType.FORCE).get(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(v)); } @@ -133,7 +133,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // Delete with a higher or equal version deletes all versions up to the given one. long v = randomIntBetween(14, 17); DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(v).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(v)); // Deleting with a lower version keeps on failing after a delete. @@ -144,7 +144,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // But delete with a higher version is OK. 
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); } @@ -175,7 +175,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // Delete with a higher version deletes all versions up to the given one. DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(17L)); // Deleting with a lower version keeps on failing after a delete. @@ -186,7 +186,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // But delete with a higher version is OK. deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(18L)); @@ -196,7 +196,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { deleteResponse = client().prepareDelete("test", "type", "1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(20L)); // Make sure that the next delete will be GC. Note we do it on the index settings so it will be cleaned up @@ -281,7 +281,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { } DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(2).execute().actionGet(); - assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation()); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); assertThat(deleteResponse.getVersion(), equalTo(3L)); assertThrows(client().prepareDelete("test", "type", "1").setVersion(2).execute(), VersionConflictEngineException.class); @@ -290,7 +290,7 @@ public class SimpleVersioningIT extends ESIntegTestCase { // This is intricate - the object was deleted but a delete transaction was with the right version. We add another one // and thus the transaction is increased. 
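        // again a delete against an already-deleted doc: NOT_FOUND, while the version advances to 4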
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(3).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(4L));
     }
@@ -479,7 +479,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
                 sb.append(" version=");
                 sb.append(deleteResponse.getVersion());
                 sb.append(" found=");
-                sb.append(deleteResponse.getOperation() == DocWriteResponse.Operation.DELETE);
+                sb.append(deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
             } else if (response instanceof IndexResponse) {
                 IndexResponse indexResponse = (IndexResponse) response;
                 sb.append(" index=");
@@ -491,7 +491,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
                 sb.append(" version=");
                 sb.append(indexResponse.getVersion());
                 sb.append(" created=");
-                sb.append(indexResponse.getOperation() == DocWriteResponse.Operation.CREATE);
+                sb.append(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
             } else {
                 sb.append(" response: " + response);
             }
diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc
index 76096967de1..b5e27c89cde 100644
--- a/docs/reference/docs/bulk.asciidoc
+++ b/docs/reference/docs/bulk.asciidoc
@@ -57,7 +57,7 @@ $ cat requests
 { "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } }
 { "field1" : "value1" }
 $ curl -s -XPOST localhost:9200/_bulk --data-binary "@requests"; echo
-{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"_operation":"create","forced_refresh":false}}]}
+{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"result":"created","forced_refresh":false}}]}
--------------------------------------------------
Because this format uses literal `\n`'s as delimiters, please be sure
diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index 2494605f87e..8f13478968a 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -26,7 +26,7 @@ The result of the above delete operation is:
     "_type" : "tweet",
     "_id" : "1",
     "_version" : 2,
-    "_operation: delete"
+    "result": "deleted"
 }
--------------------------------------------------
diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc
index f886ff7a977..8918c8cbaef 100644
--- a/docs/reference/docs/index_.asciidoc
+++ b/docs/reference/docs/index_.asciidoc
@@ -31,7 +31,7 @@ The result of the above index operation is:
     "_id" : "1",
     "_version" : 1,
     "created" : true,
-    "_operation" : create
+    "result" : "created"
 }
--------------------------------------------------
// TESTRESPONSE[s/"successful" : 2/"successful" : 1/]
@@ -231,7 +231,7 @@ The result of the above index operation is:
     "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
     "_version" : 1,
     "created" : true,
-    "_operation": "create"
+    "result": "created"
 }
--------------------------------------------------
// TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/]
diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc
index f85e152f1b3..a0d64ed4e6e 100644
--- a/docs/reference/docs/update.asciidoc
+++ b/docs/reference/docs/update.asciidoc
@@ -133,7 +133,7 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
--------------------------------------------------

If `name` was `new_name` before the
request was sent then the entire update
-request is ignored. The `operation` element in the response returns `noop` if
+request is ignored. The `result` element in the response returns `noop` if
 the request was ignored.

[source,js]
@@ -143,7 +143,7 @@ the request was ignored.
     "_type": "type1",
     "_id": "1",
     "_version": 1,
-    "_operation": noop
+    "result": "noop"
 }
--------------------------------------------------
diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc
index f691784e47f..dab6097a487 100644
--- a/docs/reference/migration/migrate_5_0/java.asciidoc
+++ b/docs/reference/migration/migrate_5_0/java.asciidoc
@@ -341,7 +341,7 @@ as `setRefresh(true)` used to have. See `setRefreshPolicy`'s javadoc for more.
 Some Java APIs (e.g., `IndicesAdminClient#setSettings`) would support Java properties
 syntax (line-delimited key=value pairs). This support has been removed.

-=== Render Search Template Java API has been removed
+==== Render Search Template Java API has been removed

 The Render Search Template Java API including `RenderSearchTemplateAction`,
 `RenderSearchTemplateRequest` and `RenderSearchTemplateResponse` has been removed
 in favor of a new `simulate` option in the Search Template Java API.
diff --git a/docs/reference/query-dsl/query-string-syntax.asciidoc b/docs/reference/query-dsl/query-string-syntax.asciidoc
index 6755b9e9efe..9e847102469 100644
--- a/docs/reference/query-dsl/query-string-syntax.asciidoc
+++ b/docs/reference/query-dsl/query-string-syntax.asciidoc
@@ -38,10 +38,6 @@ search terms, but it is possible to specify other fields in the query syntax:

     book.\*:(quick brown)

-* where the field `title` has no value (or is missing):
-
-    _missing_:title
-
 * where the field `title` has any non-null value:

     _exists_:title
diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java
index 0019bfef5d6..81d0d0a4943 100644
--- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java
+++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/RunningStats.java
@@ -78,6 +78,8 @@ public class RunningStats implements Writeable, Cloneable {
     @SuppressWarnings("unchecked")
     public RunningStats(StreamInput in) throws IOException {
         this();
+        // read doc count
+        docCount = (Long)in.readGenericValue();
         // read fieldSum
         fieldSum = (HashMap)in.readGenericValue();
         // counts
@@ -96,6 +98,8 @@ public class RunningStats implements Writeable, Cloneable {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
+        // marshall doc count
+        out.writeGenericValue(docCount);
         // marshall fieldSum
         out.writeGenericValue(fieldSum);
         // counts
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java
deleted file mode 100644
index f8cab2998dc..00000000000
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java
+++ /dev/null
@@ -1,629 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership.
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.messy.tests; - -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.equalTo; - -public class IndexLookupTests extends ESIntegTestCase { - String includeAllFlag = "_FREQUENCIES | _OFFSETS | _PAYLOADS | _POSITIONS | _CACHE"; - String includeAllWithoutRecordFlag = "_FREQUENCIES | _OFFSETS | _PAYLOADS | _POSITIONS "; - private HashMap> expectedEndOffsetsArray; - private HashMap> expectedPayloadsArray; - private HashMap> expectedPositionsArray; - private HashMap> emptyArray; - private HashMap> expectedStartOffsetsArray; - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); - } - - void initTestData() throws InterruptedException, ExecutionException, IOException { - emptyArray = new HashMap<>(); - List empty1 = new ArrayList<>(); - empty1.add(-1); - empty1.add(-1); - emptyArray.put("1", empty1); - List empty2 = new ArrayList<>(); - empty2.add(-1); - empty2.add(-1); - emptyArray.put("2", empty2); - List empty3 = new ArrayList<>(); - empty3.add(-1); - empty3.add(-1); - emptyArray.put("3", empty3); - - expectedPositionsArray = new HashMap<>(); - - List pos1 = new ArrayList<>(); - pos1.add(1); - pos1.add(2); - expectedPositionsArray.put("1", pos1); - List pos2 = new ArrayList<>(); - pos2.add(0); - pos2.add(1); - expectedPositionsArray.put("2", pos2); - List pos3 = new ArrayList<>(); - pos3.add(0); - pos3.add(4); - expectedPositionsArray.put("3", pos3); - - expectedPayloadsArray = new HashMap<>(); - List pay1 = new ArrayList<>(); - pay1.add(2); - pay1.add(3); - expectedPayloadsArray.put("1", pay1); - List pay2 = new ArrayList<>(); - pay2.add(1); - pay2.add(2); - expectedPayloadsArray.put("2", pay2); - List pay3 = new ArrayList<>(); - pay3.add(1); - pay3.add(-1); - expectedPayloadsArray.put("3", pay3); - /* - 
* "a|1 b|2 b|3 c|4 d " "b|1 b|2 c|3 d|4 a " "b|1 c|2 d|3 a|4 b " - */ - expectedStartOffsetsArray = new HashMap<>(); - List starts1 = new ArrayList<>(); - starts1.add(4); - starts1.add(8); - expectedStartOffsetsArray.put("1", starts1); - List starts2 = new ArrayList<>(); - starts2.add(0); - starts2.add(4); - expectedStartOffsetsArray.put("2", starts2); - List starts3 = new ArrayList<>(); - starts3.add(0); - starts3.add(16); - expectedStartOffsetsArray.put("3", starts3); - - expectedEndOffsetsArray = new HashMap<>(); - List ends1 = new ArrayList<>(); - ends1.add(7); - ends1.add(11); - expectedEndOffsetsArray.put("1", ends1); - List ends2 = new ArrayList<>(); - ends2.add(3); - ends2.add(7); - expectedEndOffsetsArray.put("2", ends2); - List ends3 = new ArrayList<>(); - ends3.add(3); - ends3.add(17); - expectedEndOffsetsArray.put("3", ends3); - - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("int_payload_field").field("type", "text").field("index_options", "offsets") - .field("analyzer", "payload_int").endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( - Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int") - .put("index.analysis.filter.delimited_int.delimiter", "|") - .put("index.analysis.filter.delimited_int.encoding", "int") - .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter"))); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("int_payload_field", "a|1 b|2 b|3 c|4 d "), client() - .prepareIndex("test", "type1", "2").setSource("int_payload_field", "b|1 b|2 c|3 d|4 a "), - client().prepareIndex("test", "type1", "3").setSource("int_payload_field", "b|1 c|2 d|3 a|4 b ")); - ensureGreen(); - } - - public void testTwoScripts() throws Exception { - initTestData(); - - // check term frequencies for 'a' - Script scriptFieldScript = new Script("term = _index['int_payload_field']['c']; term.tf()"); - scriptFieldScript = new Script("1"); - Script scoreScript = new Script("term = _index['int_payload_field']['b']; term.tf()"); - Map expectedResultsField = new HashMap<>(); - expectedResultsField.put("1", 1); - expectedResultsField.put("2", 1); - expectedResultsField.put("3", 1); - Map expectedResultsScore = new HashMap<>(); - expectedResultsScore.put("1", 2f); - expectedResultsScore.put("2", 2f); - expectedResultsScore.put("3", 2f); - checkOnlyFunctionScore(scoreScript, expectedResultsScore, 3); - checkValueInEachDocWithFunctionScore(scriptFieldScript, expectedResultsField, scoreScript, expectedResultsScore, 3); - - } - - public void testCallWithDifferentFlagsFails() throws Exception { - initTestData(); - - // should throw an exception, we cannot call with different flags twice - // if the flags of the second call were not included in the first call. - Script script = new Script("term = _index['int_payload_field']['b']; return _index['int_payload_field'].get('b', _POSITIONS).tf();"); - try { - client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - assertThat( - "got: " + e.toString(), - e.toString() - .indexOf( - "You must call get with all required flags! 
Instead of _index['int_payload_field'].get('b', _FREQUENCIES) and _index['int_payload_field'].get('b', _POSITIONS) call _index['int_payload_field'].get('b', _FREQUENCIES | _POSITIONS) once]"), - Matchers.greaterThan(-1)); - } - - // Should not throw an exception this way round - script = new Script( - "term = _index['int_payload_field'].get('b', _POSITIONS | _FREQUENCIES);return _index['int_payload_field']['b'].tf();"); - client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script).execute().actionGet(); - } - - private void checkOnlyFunctionScore(Script scoreScript, Map expectedScore, int numExpectedDocs) { - SearchResponse sr = client().prepareSearch("test") - .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))).execute() - .actionGet(); - assertHitCount(sr, numExpectedDocs); - for (SearchHit hit : sr.getHits().getHits()) { - assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(), - Matchers.closeTo(hit.score(), 1.e-4)); - } - } - - public void testDocumentationExample() throws Exception { - initTestData(); - - Script script = new Script("term = _index['float_payload_field'].get('b'," + includeAllFlag - + "); payloadSum=0; for (pos in term) {payloadSum = pos.payloadAsInt(0)}; payloadSum"); - - // non existing field: sum should be 0 - HashMap zeroArray = new HashMap<>(); - zeroArray.put("1", 0); - zeroArray.put("2", 0); - zeroArray.put("3", 0); - checkValueInEachDoc(script, zeroArray, 3); - - script = new Script("term = _index['int_payload_field'].get('b'," + includeAllFlag - + "); payloadSum=0; for (pos in term) {payloadSum = payloadSum + pos.payloadAsInt(0)}; payloadSum"); - - // existing field: sums should be as here: - zeroArray.put("1", 5); - zeroArray.put("2", 3); - zeroArray.put("3", 1); - checkValueInEachDoc(script, zeroArray, 3); - } - - public void testIteratorAndRecording() throws Exception { - initTestData(); - - // call twice with record: should work as expected - Script script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "position"); - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "startOffset"); - checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); - script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "endOffset"); - checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); - script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "payloadAsInt(-1)"); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); - - // no record and get iterator twice: should fail - script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "position"); - checkExceptions(script); - script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "startOffset"); - checkExceptions(script); - script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "endOffset"); - checkExceptions(script); - script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "payloadAsInt(-1)"); - checkExceptions(script); - - // no record and get termObject twice and iterate: should fail - script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "position"); - checkExceptions(script); - script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "startOffset"); - checkExceptions(script); - script = 
createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "endOffset"); - checkExceptions(script); - script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "payloadAsInt(-1)"); - checkExceptions(script); - - } - - private Script createPositionsArrayScriptGetInfoObjectTwice(String term, String flags, String what) { - String script = "term = _index['int_payload_field'].get('" + term + "'," + flags - + "); array=[]; for (pos in term) {array.add(pos." + what + ")}; _index['int_payload_field'].get('" + term + "'," - + flags + "); array=[]; for (pos in term) {array.add(pos." + what + ")}"; - return new Script(script); - } - - private Script createPositionsArrayScriptIterateTwice(String term, String flags, String what) { - String script = "term = _index['int_payload_field'].get('" + term + "'," + flags - + "); array=[]; for (pos in term) {array.add(pos." + what + ")}; array=[]; for (pos in term) {array.add(pos." + what - + ")}; array"; - return new Script(script); - } - - private Script createPositionsArrayScript(String field, String term, String flags, String what) { - String script = "term = _index['" + field + "'].get('" + term + "'," + flags - + "); array=[]; for (pos in term) {array.add(pos." + what + ")}; array"; - return new Script(script); - } - - private Script createPositionsArrayScriptDefaultGet(String field, String term, String what) { - String script = "term = _index['" + field + "']['" + term + "']; array=[]; for (pos in term) {array.add(pos." + what - + ")}; array"; - return new Script(script); - } - - public void testFlags() throws Exception { - initTestData(); - - // check default flag - Script script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "position"); - // there should be no positions - /* TODO: the following tests fail with the new postings enum apis because of a bogus assert in BlockDocsEnum - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "startOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "endOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "payloadAsInt(-1)"); - // there should be no payload - checkArrayValsInEachDoc(script, emptyArray, 3); - - // check FLAG_FREQUENCIES flag - script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "position"); - // there should be no positions - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "startOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "endOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "payloadAsInt(-1)"); - // there should be no payloads - checkArrayValsInEachDoc(script, emptyArray, 3);*/ - - // check FLAG_POSITIONS flag - script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "position"); - // there should be positions - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - /* TODO: these tests make a bogus assumption that asking for positions will return only positions 
- script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "startOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "endOffset"); - // there should be no offsets - checkArrayValsInEachDoc(script, emptyArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "payloadAsInt(-1)"); - // there should be no payloads - checkArrayValsInEachDoc(script, emptyArray, 3);*/ - - // check FLAG_OFFSETS flag - script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "position"); - // there should be positions and s forth ... - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "startOffset"); - checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "endOffset"); - checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "payloadAsInt(-1)"); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); - - // check FLAG_PAYLOADS flag - script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "position"); - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "startOffset"); - checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "endOffset"); - checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "payloadAsInt(-1)"); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); - - // check all flags - String allFlags = "_POSITIONS | _OFFSETS | _PAYLOADS"; - script = createPositionsArrayScript("int_payload_field", "b", allFlags, "position"); - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", allFlags, "startOffset"); - checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", allFlags, "endOffset"); - checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", allFlags, "payloadAsInt(-1)"); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); - - // check all flags without record - script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "position"); - checkArrayValsInEachDoc(script, expectedPositionsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "startOffset"); - checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "endOffset"); - checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3); - script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "payloadAsInt(-1)"); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 3); - - } - - private void checkArrayValsInEachDoc(Script script, HashMap> expectedArray, int expectedHitSize) { - SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) - 
.execute().actionGet(); - assertHitCount(sr, expectedHitSize); - int nullCounter = 0; - for (SearchHit hit : sr.getHits().getHits()) { - Object result = hit.getFields().get("tvtest").getValues(); - Object expectedResult = expectedArray.get(hit.getId()); - assertThat("for doc " + hit.getId(), result, equalTo(expectedResult)); - if (expectedResult != null) { - nullCounter++; - } - } - assertThat(nullCounter, equalTo(expectedArray.size())); - } - - public void testAllExceptPosAndOffset() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("float_payload_field").field("type", "text").field("index_options", "offsets").field("term_vector", "no") - .field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "text") - .field("index_options", "offsets").field("term_vector", "no").field("analyzer", "payload_string").endObject() - .startObject("int_payload_field").field("type", "text").field("index_options", "offsets") - .field("analyzer", "payload_int").endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings( - Settings.builder() - .put(indexSettings()) - .put("index.analysis.analyzer.payload_float.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.payload_float.filter", "delimited_float") - .put("index.analysis.filter.delimited_float.delimiter", "|") - .put("index.analysis.filter.delimited_float.encoding", "float") - .put("index.analysis.filter.delimited_float.type", "delimited_payload_filter") - .put("index.analysis.analyzer.payload_string.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.payload_string.filter", "delimited_string") - .put("index.analysis.filter.delimited_string.delimiter", "|") - .put("index.analysis.filter.delimited_string.encoding", "identity") - .put("index.analysis.filter.delimited_string.type", "delimited_payload_filter") - .put("index.analysis.analyzer.payload_int.tokenizer", "whitespace") - .putArray("index.analysis.analyzer.payload_int.filter", "delimited_int") - .put("index.analysis.filter.delimited_int.delimiter", "|") - .put("index.analysis.filter.delimited_int.encoding", "int") - .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter") - .put("index.number_of_shards", 1))); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("float_payload_field", "a|1 b|2 a|3 b "), client() - .prepareIndex("test", "type1", "2").setSource("string_payload_field", "a|a b|b a|a b "), - client().prepareIndex("test", "type1", "3").setSource("float_payload_field", "a|4 b|5 a|6 b "), - client().prepareIndex("test", "type1", "4").setSource("string_payload_field", "a|b b|a a|b b "), - client().prepareIndex("test", "type1", "5").setSource("float_payload_field", "c "), - client().prepareIndex("test", "type1", "6").setSource("int_payload_field", "c|1")); - - // get the number of all docs - Script script = new Script("_index.numDocs()"); - checkValueInEachDoc(6, script, 6); - - // get the number of docs with field float_payload_field - script = new Script("_index['float_payload_field'].docCount()"); - checkValueInEachDoc(3, script, 6); - - // corner case: what if the field does not exist? 
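As a quick cross-check: the docCount of 3 asserted just above, and the sumttf/sumdf values verified after the corner case below, follow directly from the fixture documents passed to indexRandom(...) at the start of this test. A back-of-the-envelope sketch in plain Java, using only the doc sources shown above:

    // float_payload_field appears in docs 1, 3 and 5 only -> docCount() == 3
    // doc 1: "a|1 b|2 a|3 b " -> tokens a b a b (4 tokens)
    // doc 3: "a|4 b|5 a|6 b " -> tokens a b a b (4 tokens)
    // doc 5: "c "             -> token  c       (1 token)
    int sumttf = 4 + 4 + 1; // 9: total term frequency summed over all docs
    int sumdf  = 2 + 2 + 1; // 5: 'a' occurs in 2 docs, 'b' in 2 docs, 'c' in 1

The corner-case lookups that follow then assert that the same statistics come back as 0 for a field that does not exist.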
- script = new Script("_index['non_existent_field'].docCount()"); - checkValueInEachDoc(0, script, 6); - - // get the number of all tokens in all docs - script = new Script("_index['float_payload_field'].sumttf()"); - checkValueInEachDoc(9, script, 6); - - // corner case get the number of all tokens in all docs for non existent - // field - script = new Script("_index['non_existent_field'].sumttf()"); - checkValueInEachDoc(0, script, 6); - - // get the sum of doc freqs in all docs - script = new Script("_index['float_payload_field'].sumdf()"); - checkValueInEachDoc(5, script, 6); - - // get the sum of doc freqs in all docs for non existent field - script = new Script("_index['non_existent_field'].sumdf()"); - checkValueInEachDoc(0, script, 6); - - // check term frequencies for 'a' - script = new Script("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}"); - Map expectedResults = new HashMap<>(); - expectedResults.put("1", 2); - expectedResults.put("2", 0); - expectedResults.put("3", 2); - expectedResults.put("4", 0); - expectedResults.put("5", 0); - expectedResults.put("6", 0); - checkValueInEachDoc(script, expectedResults, 6); - expectedResults.clear(); - - // check doc frequencies for 'c' - script = new Script("term = _index['float_payload_field']['c']; if (term != null) {term.df()}"); - expectedResults.put("1", 1L); - expectedResults.put("2", 1L); - expectedResults.put("3", 1L); - expectedResults.put("4", 1L); - expectedResults.put("5", 1L); - expectedResults.put("6", 1L); - checkValueInEachDoc(script, expectedResults, 6); - expectedResults.clear(); - - // check doc frequencies for term that does not exist - script = new Script("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}"); - expectedResults.put("1", 0L); - expectedResults.put("2", 0L); - expectedResults.put("3", 0L); - expectedResults.put("4", 0L); - expectedResults.put("5", 0L); - expectedResults.put("6", 0L); - checkValueInEachDoc(script, expectedResults, 6); - expectedResults.clear(); - - // check doc frequencies for term that does not exist - script = new Script("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}"); - expectedResults.put("1", 0); - expectedResults.put("2", 0); - expectedResults.put("3", 0); - expectedResults.put("4", 0); - expectedResults.put("5", 0); - expectedResults.put("6", 0); - checkValueInEachDoc(script, expectedResults, 6); - expectedResults.clear(); - - // check total term frequencies for 'a' - script = new Script("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}"); - expectedResults.put("1", 4L); - expectedResults.put("2", 4L); - expectedResults.put("3", 4L); - expectedResults.put("4", 4L); - expectedResults.put("5", 4L); - expectedResults.put("6", 4L); - checkValueInEachDoc(script, expectedResults, 6); - expectedResults.clear(); - - // check float payload for 'b' - HashMap> expectedPayloadsArray = new HashMap<>(); - script = createPositionsArrayScript("float_payload_field", "b", includeAllFlag, "payloadAsFloat(-1)"); - float missingValue = -1; - List payloadsFor1 = new ArrayList<>(); - payloadsFor1.add(2f); - payloadsFor1.add(missingValue); - expectedPayloadsArray.put("1", payloadsFor1); - List payloadsFor2 = new ArrayList<>(); - payloadsFor2.add(5f); - payloadsFor2.add(missingValue); - expectedPayloadsArray.put("3", payloadsFor2); - expectedPayloadsArray.put("6", new ArrayList<>()); - expectedPayloadsArray.put("5", new ArrayList<>()); - expectedPayloadsArray.put("4", new 
ArrayList<>()); - expectedPayloadsArray.put("2", new ArrayList<>()); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); - - // check string payload for 'b' - expectedPayloadsArray.clear(); - payloadsFor1.clear(); - payloadsFor2.clear(); - script = createPositionsArrayScript("string_payload_field", "b", includeAllFlag, "payloadAsString()"); - payloadsFor1.add("b"); - payloadsFor1.add(null); - expectedPayloadsArray.put("2", payloadsFor1); - payloadsFor2.add("a"); - payloadsFor2.add(null); - expectedPayloadsArray.put("4", payloadsFor2); - expectedPayloadsArray.put("6", new ArrayList<>()); - expectedPayloadsArray.put("5", new ArrayList<>()); - expectedPayloadsArray.put("3", new ArrayList<>()); - expectedPayloadsArray.put("1", new ArrayList<>()); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); - - // check int payload for 'c' - expectedPayloadsArray.clear(); - payloadsFor1.clear(); - payloadsFor2.clear(); - script = createPositionsArrayScript("int_payload_field", "c", includeAllFlag, "payloadAsInt(-1)"); - payloadsFor1 = new ArrayList<>(); - payloadsFor1.add(1); - expectedPayloadsArray.put("6", payloadsFor1); - expectedPayloadsArray.put("5", new ArrayList<>()); - expectedPayloadsArray.put("4", new ArrayList<>()); - expectedPayloadsArray.put("3", new ArrayList<>()); - expectedPayloadsArray.put("2", new ArrayList<>()); - expectedPayloadsArray.put("1", new ArrayList<>()); - checkArrayValsInEachDoc(script, expectedPayloadsArray, 6); - - } - - private void checkExceptions(Script script) { - try { - SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) - .execute().actionGet(); - assertThat(sr.getHits().hits().length, equalTo(0)); - ShardSearchFailure[] shardFails = sr.getShardFailures(); - for (ShardSearchFailure fail : shardFails) { - assertThat(fail.reason().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."), - Matchers.greaterThan(-1)); - } - } catch (SearchPhaseExecutionException ex) { - assertThat( - "got " + ex.toString(), - ex.toString().indexOf("Cannot iterate twice! 
If you want to iterate more that once, add _CACHE explicitly."), - Matchers.greaterThan(-1)); - } - } - - private void checkValueInEachDocWithFunctionScore(Script fieldScript, Map expectedFieldVals, Script scoreScript, - Map expectedScore, int numExpectedDocs) { - SearchResponse sr = client().prepareSearch("test") - .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))) - .addScriptField("tvtest", fieldScript).execute().actionGet(); - assertHitCount(sr, numExpectedDocs); - for (SearchHit hit : sr.getHits().getHits()) { - Object result = hit.getFields().get("tvtest").getValues().get(0); - Object expectedResult = expectedFieldVals.get(hit.getId()); - assertThat("for doc " + hit.getId(), result, equalTo(expectedResult)); - assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(), - Matchers.closeTo(hit.score(), 1.e-4)); - } - } - - private void checkValueInEachDoc(Script script, Map expectedResults, int numExpectedDocs) { - SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) - .execute().actionGet(); - assertHitCount(sr, numExpectedDocs); - for (SearchHit hit : sr.getHits().getHits()) { - Object result = hit.getFields().get("tvtest").getValues().get(0); - Object expectedResult = expectedResults.get(hit.getId()); - assertThat("for doc " + hit.getId(), result, equalTo(expectedResult)); - } - } - - private void checkValueInEachDoc(int value, Script script, int numExpectedDocs) { - SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script) - .execute().actionGet(); - assertHitCount(sr, numExpectedDocs); - for (SearchHit hit : sr.getHits().getHits()) { - Object result = hit.getFields().get("tvtest").getValues().get(0); - if (result instanceof Integer) { - assertThat((Integer)result, equalTo(value)); - } else if (result instanceof Long) { - assertThat(((Long) result).intValue(), equalTo(value)); - } else { - fail(); - } - } - } -} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java deleted file mode 100644 index 78d95edecff..00000000000 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.messy.tests; - - -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.search.sort.ScriptSortBuilder; -import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; -import org.elasticsearch.search.sort.SortBuilders; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Random; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; - -/** - * - */ -public class SimpleSortTests extends ESIntegTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); - } - - public void testSimpleSorts() throws Exception { - Random random = random(); - assertAcked(prepareCreate("test") - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("str_value").field("type", "keyword").endObject() - .startObject("boolean_value").field("type", "boolean").endObject() - .startObject("byte_value").field("type", "byte").endObject() - .startObject("short_value").field("type", "short").endObject() - .startObject("integer_value").field("type", "integer").endObject() - .startObject("long_value").field("type", "long").endObject() - .startObject("float_value").field("type", "float").endObject() - .startObject("double_value").field("type", "double").endObject() - .endObject().endObject().endObject())); - ensureGreen(); - List builders = new ArrayList<>(); - for (int i = 0; i < 10; i++) { - IndexRequestBuilder builder = client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject() - .field("str_value", new String(new char[]{(char) (97 + i), (char) (97 + i)})) - .field("boolean_value", true) - .field("byte_value", i) - .field("short_value", i) - .field("integer_value", i) - .field("long_value", i) - .field("float_value", 0.1 * i) - .field("double_value", 0.1 * i) - .endObject()); - builders.add(builder); - } - Collections.shuffle(builders, random); - for (IndexRequestBuilder builder : builders) { - builder.execute().actionGet(); - if (random.nextBoolean()) { - if (random.nextInt(5) != 0) { - refresh(); - } else { - client().admin().indices().prepareFlush().execute().actionGet(); - } - } - - } - refresh(); - - // STRING script - int size = 1 + random.nextInt(10); 
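For readability: the str_value written above is simply the i-th lowercase letter doubled (97 is the code point of 'a'), which is exactly what the sort assertions below compare against. A tiny standalone illustration:

    // 97 == 'a', so doc i carries the i-th letter twice: "aa", "bb", "cc", ...
    for (int i = 0; i < 3; i++) {
        System.out.println(new String(new char[]{(char) (97 + i), (char) (97 + i)}));
    }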
- - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(size) - .addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), ScriptSortType.STRING)).execute().actionGet(); - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().hits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i))); - assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[] { (char) (97 + i), - (char) (97 + i) }))); - } - size = 1 + random.nextInt(10); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).execute() - .actionGet(); - - assertHitCount(searchResponse, 10); - assertThat(searchResponse.getHits().hits().length, equalTo(size)); - for (int i = 0; i < size; i++) { - assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i))); - assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[] { (char) (97 + (9 - i)), - (char) (97 + (9 - i)) }))); - } - - assertThat(searchResponse.toString(), not(containsString("error"))); - - assertNoFailures(searchResponse); - } - - public void testSortMinValueScript() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("lvalue").field("type", "long").endObject() - .startObject("dvalue").field("type", "double").endObject() - .startObject("svalue").field("type", "keyword").endObject() - .startObject("gvalue").field("type", "geo_point").endObject() - .endObject().endObject().endObject().string(); - assertAcked(prepareCreate("test").addMapping("type1", mapping)); - ensureGreen(); - - for (int i = 0; i < 10; i++) { - IndexRequestBuilder req = client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject() - .field("ord", i) - .field("svalue", new String[]{"" + i, "" + (i + 1), "" + (i + 2)}) - .field("lvalue", new long[]{i, i + 1, i + 2}) - .field("dvalue", new double[]{i, i + 1, i + 2}) - .startObject("gvalue") - .field("lat", (double) i + 1) - .field("lon", (double) i) - .endObject() - .endObject()); - req.execute().actionGet(); - } - - for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields - client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject() - .field("ord", i) - .endObject()).execute().actionGet(); - } - client().admin().indices().prepareRefresh("test").execute().actionGet(); - - // test the long values - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("min", new Script("retval = Long.MAX_VALUE; for (v in doc['lvalue'].values){ retval = min(v, retval) }; retval")) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L)); - for (int i = 0; i < 10; i++) { - assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Long) searchResponse.getHits().getAt(i).field("min").value(), equalTo((long) i)); - } - // test the double values - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("min", new Script("retval = Double.MAX_VALUE; for (v in doc['dvalue'].values){ retval = min(v, retval) }; retval")) - 
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L)); - for (int i = 0; i < 10; i++) { - assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), equalTo((double) i)); - } - - // test the string values - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("min", new Script("retval = Integer.MAX_VALUE; for (v in doc['svalue'].values){ retval = min(Integer.parseInt(v), retval) }; retval")) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L)); - for (int i = 0; i < 10; i++) { - assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Integer) searchResponse.getHits().getAt(i).field("min").value(), equalTo(i)); - } - - // test the geopoint values - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("min", new Script("retval = Double.MAX_VALUE; for (v in doc['gvalue'].values){ retval = min(v.lon, retval) }; retval")) - .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L)); - for (int i = 0; i < 10; i++) { - assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, GeoUtils.TOLERANCE)); - } - } - - public void testDocumentsWithNullValue() throws Exception { - // TODO: sort shouldn't fail when sort field is mapped dynamically - // We have to specify mapping explicitly because by the time search is performed dynamic mapping might not - // be propagated to all nodes yet and sort operation fail when the sort field is not defined - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("id").field("type", "keyword").endObject() - .startObject("svalue").field("type", "keyword").endObject() - .endObject().endObject().endObject().string(); - assertAcked(prepareCreate("test").addMapping("type1", mapping)); - ensureGreen(); - - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject() - .field("id", "1") - .field("svalue", "aaa") - .endObject()).execute().actionGet(); - - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject() - .field("id", "2") - .nullField("svalue") - .endObject()).execute().actionGet(); - - client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject() - .field("id", "3") - .field("svalue", "bbb") - .endObject()).execute().actionGet(); - - - flush(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("id", new Script("doc['id'].value")) - .addSort("svalue", SortOrder.ASC) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3")); - 
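- // doc 2 was indexed with a null svalue, so the ascending sort on svalue puts it last: "aaa" (1), "bbb" (3), then the null doc (2)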
assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); - - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("id", new Script("doc['id'].values[0]")) - .addSort("svalue", SortOrder.ASC) - .execute().actionGet(); - - assertNoFailures(searchResponse); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); - - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("id", new Script("doc['id'].value")) - .addSort("svalue", SortOrder.DESC) - .execute().actionGet(); - - if (searchResponse.getFailedShards() > 0) { - logger.warn("Failed shards:"); - for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { - logger.warn("-> {}", shardSearchFailure); - } - } - assertThat(searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L)); - assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("3")); - assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("1")); - assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2")); - - // a query with docs just with null values - searchResponse = client().prepareSearch() - .setQuery(termQuery("id", "2")) - .addScriptField("id", new Script("doc['id'].value")) - .addSort("svalue", SortOrder.DESC) - .execute().actionGet(); - - if (searchResponse.getFailedShards() > 0) { - logger.warn("Failed shards:"); - for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { - logger.warn("-> {}", shardSearchFailure); - } - } - assertThat(searchResponse.getFailedShards(), equalTo(0)); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("2")); - } - - public void test2920() throws IOException { - assertAcked(prepareCreate("test").addMapping( - "test", - jsonBuilder().startObject().startObject("test").startObject("properties").startObject("value").field("type", "keyword") - .endObject().endObject().endObject().endObject())); - ensureGreen(); - for (int i = 0; i < 10; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) - .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()).execute().actionGet(); - } - refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.scriptSort(new Script("\u0027\u0027"), ScriptSortType.STRING)).setSize(10).execute().actionGet(); - assertNoFailures(searchResponse); - } -} diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java deleted file mode 100644 index 383e7dd6028..00000000000 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * This package contains tests that use groovy to test what looks - * to be unrelated functionality, or functionality that should be - * tested with a mock instead. Instead of doing an epic battle - * with these tests, they are temporarily moved here to the groovy - * plugin's tests, but that is likely not where they belong. Please - * help by cleaning them up and we can remove this package! - * - *
<ul> - * <li>If the test is actually testing groovy specifically, move to - * the org.elasticsearch.script.groovy tests package of this plugin</li> - * <li>If the test is testing scripting integration with another core subsystem, - * fix it to use a mock instead, so it can be in the core tests again</li> - * <li>If the test is just being lazy, and does not really need scripting to test - * something, clean it up!</li> - * </ul>
- */ -/* List of renames that took place: - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/AvgTests.java - renamed: core/src/test/java/org/elasticsearch/document/BulkIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java - renamed: core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ChildQuerySearchTests.java - renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java - ^^^^^ note: the methods from this test using mustache were moved to the mustache module under its messy tests package. - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateHistogramTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java - renamed: core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java - renamed: core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java - renamed: core/src/test/java/org/elasticsearch/script/IndexLookupIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java - renamed: core/src/test/java/org/elasticsearch/script/IndexedScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java - renamed: core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/InnerHitsTests.java - renamed: core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/PercolatorTests.java - renamed: core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java - renamed: core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java - renamed: core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java - renamed: core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java - renamed: 
core/src/test/java/org/elasticsearch/search/timeout/SearchTimeoutIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchTimeoutTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SignificantTermsSignificanceScoreTests.java - renamed: core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleNestedTests.java - renamed: core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SumTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java - renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java - renamed: core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptCompilationException.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java - renamed: core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java - renamed: core/src/test/java/org/elasticsearch/script/GroovySecurityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/combine_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/init_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/map_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/reduce_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/bucket/config/scripts/significance_script_no_params.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy - renamed: core/src/test/resources/org/elasticsearch/search/aggregations/bucket/config/scripts/significance_script_with_params.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy - */ -package org.elasticsearch.messy.tests; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java similarity index 98% rename from 
modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java rename to modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java index 623d2cf155c..be307c690f9 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyIndexedScriptTests.java @@ -18,7 +18,7 @@ */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.script.groovy; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -public class IndexedScriptTests extends ESIntegTestCase { +public class GroovyIndexedScriptTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return Collections.singleton(GroovyPlugin.class); diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy deleted file mode 100644 index da13f608757..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy +++ /dev/null @@ -1 +0,0 @@ -newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation \ No newline at end of file diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy deleted file mode 100644 index 6cf4f40b6d2..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy +++ /dev/null @@ -1 +0,0 @@ -vars.multiplier = 3 \ No newline at end of file diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy deleted file mode 100644 index aece1a7d84d..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy +++ /dev/null @@ -1 +0,0 @@ -_agg.add(vars.multiplier) \ No newline at end of file diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy deleted file mode 100644 index 835dcfbb3a8..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy +++ /dev/null @@ -1 +0,0 @@ -newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation \ No newline at end of file diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy deleted file mode 100644 index 7178e05efbd..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy +++ /dev/null @@ -1 +0,0 @@ -return 
_subset_freq + _subset_size + _superset_freq + _superset_size diff --git a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy b/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy deleted file mode 100644 index 0099a531fd2..00000000000 --- a/modules/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy +++ /dev/null @@ -1 +0,0 @@ -return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java index 96d6ce28dc1..dd5c0a18328 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -33,16 +34,19 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.test.AbstractQueryTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Function; public class TemplateQueryBuilderTests extends AbstractQueryTestCase { @@ -53,7 +57,39 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase> getPlugins() { - return Collections.singleton(MustachePlugin.class); + return Arrays.asList(MustachePlugin.class, CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + @SuppressWarnings("unchecked") + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("{ \"match_all\" : {}}", + s -> new BytesArray("{ \"match_all\" : {}}")); + + scripts.put("{ \"match_all\" : {\"_name\" : \"foobar\"}}", + s -> new BytesArray("{ \"match_all\" : {\"_name\" : \"foobar\"}}")); + + scripts.put("{\n" + + " \"term\" : {\n" + + " \"foo\" : {\n" + + " \"value\" : \"bar\",\n" + + " \"boost\" : 2.0\n" + + " }\n" + + " }\n" + + "}", s -> new BytesArray("{\n" + + " \"term\" : {\n" + + " \"foo\" : {\n" + + " \"value\" : \"bar\",\n" + + " \"boost\" : 2.0\n" + + " }\n" + + " }\n" + + "}")); + return scripts; + } } @Before @@ -68,7 +104,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase indices, MetaData clusterMetadata) { + public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { try { if (!blobStore.doesContainerExist(blobStore.container())) { logger.debug("container 
[{}] does not exist. Creating...", blobStore.container()); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 4ed365b5ac8..51b5eae57ae 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -21,22 +21,19 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; +import java.nio.file.NoSuchFileException; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -82,7 +79,7 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS @Override public InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws IOException { if (!blobExists(account, mode, container, blob)) { - throw new FileNotFoundException("missing blob [" + blob + "]"); + throw new NoSuchFileException("missing blob [" + blob + "]"); } return new ByteArrayInputStream(blobs.get(blob).toByteArray()); } @@ -99,13 +96,13 @@ public class AzureStorageServiceMock extends AbstractComponent implements AzureS MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); for (String blobName : blobs.keySet()) { final String checkBlob; - if (keyPath != null) { + if (keyPath != null && !keyPath.isEmpty()) { // strip off key path from the beginning of the blob name checkBlob = blobName.replace(keyPath, ""); } else { checkBlob = blobName; } - if (startsWithIgnoreCase(checkBlob, prefix)) { + if (prefix == null || startsWithIgnoreCase(checkBlob, prefix)) { blobsBuilder.put(blobName, new PlainBlobMetaData(checkBlob, blobs.get(blobName).size())); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java new file mode 100644 index 00000000000..85ca44205aa --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.StorageException; +import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; +import org.elasticsearch.cloud.azure.storage.AzureStorageServiceMock; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; + +import java.io.IOException; +import java.net.URISyntaxException; + +public class AzureBlobStoreContainerTests extends ESBlobStoreContainerTestCase { + @Override + protected BlobStore newBlobStore() throws IOException { + try { + RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY); + AzureStorageServiceMock client = new AzureStorageServiceMock(); + return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client); + } catch (URISyntaxException | StorageException e) { + throw new IOException(e); + } + } +} diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java index 6ff5aa41819..b63d03487b2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java @@ -41,9 +41,9 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; +import java.nio.file.NoSuchFileException; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; @@ -196,7 +196,7 @@ public class GoogleCloudStorageBlobStore extends AbstractComponent implements Bl } catch (GoogleJsonResponseException e) { GoogleJsonError error = e.getDetails(); if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - throw new FileNotFoundException(e.getMessage()); + throw new NoSuchFileException(e.getMessage()); } throw e; } @@ -227,6 +227,9 @@ public class GoogleCloudStorageBlobStore extends AbstractComponent implements Bl * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { + if (!blobExists(blobName)) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); + } doPrivileged(() -> client.objects().delete(bucket, blobName).execute()); } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index a22178315f7..eac97b97b81 100644 --- 
a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -32,9 +32,9 @@ import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; +import java.nio.file.NoSuchFileException; import java.util.Collections; import java.util.EnumSet; import java.util.LinkedHashMap; @@ -68,16 +68,16 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { - try { - store.execute(new Operation() { - @Override - public Boolean run(FileContext fileContext) throws IOException { - return fileContext.delete(new Path(path, blobName), true); - } - }); - } catch (FileNotFoundException ok) { - // behaves like Files.deleteIfExists + if (!blobExists(blobName)) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } + + store.execute(new Operation() { + @Override + public Boolean run(FileContext fileContext) throws IOException { + return fileContext.delete(new Path(path, blobName), true); + } + }); } @Override @@ -93,6 +93,9 @@ final class HdfsBlobContainer extends AbstractBlobContainer { @Override public InputStream readBlob(String blobName) throws IOException { + if (!blobExists(blobName)) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); + } // FSDataInputStream does buffering internally return store.execute(new Operation() { @Override diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java new file mode 100644 index 00000000000..cdc6dd96803 --- /dev/null +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.repositories.hdfs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.AbstractFileSystem; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.UnsupportedFileSystemException; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; + +import javax.security.auth.Subject; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.net.URISyntaxException; +import java.security.AccessController; +import java.security.Principal; +import java.security.PrivilegedAction; +import java.util.Collections; + +public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase { + + @Override + protected BlobStore newBlobStore() throws IOException { + return AccessController.doPrivileged( + new PrivilegedAction() { + @Override + public HdfsBlobStore run() { + try { + FileContext fileContext = createContext(new URI("hdfs:///")); + return new HdfsBlobStore(fileContext, "temp", 1024); + } catch (IOException | URISyntaxException e) { + throw new RuntimeException(e); + } + } + }); + } + + @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)") + private FileContext createContext(URI uri) { + // mirrors HdfsRepository.java behaviour + Configuration cfg = new Configuration(true); + cfg.setClassLoader(HdfsRepository.class.getClassLoader()); + cfg.reloadConfiguration(); + + Constructor ctor; + Subject subject; + + try { + Class clazz = Class.forName("org.apache.hadoop.security.User"); + ctor = clazz.getConstructor(String.class); + ctor.setAccessible(true); + } catch (ClassNotFoundException | NoSuchMethodException e) { + throw new RuntimeException(e); + } + + try { + Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name")); + subject = new Subject(false, Collections.singleton(principal), + Collections.emptySet(), Collections.emptySet()); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + + // disable file system cache + cfg.setBoolean("fs.hdfs.impl.disable.cache", true); + + // set file system to TestingFs to avoid a bunch of security + // checks, similar to what is done in HdfsTests.java + cfg.set("fs.AbstractFileSystem." 
+ uri.getScheme() + ".impl", TestingFs.class.getName()); + + // create the FileContext with our user + return Subject.doAs(subject, new PrivilegedAction() { + @Override + public FileContext run() { + try { + TestingFs fs = (TestingFs) AbstractFileSystem.get(uri, cfg); + return FileContext.getFileContext(fs, cfg); + } catch (UnsupportedFileSystemException e) { + throw new RuntimeException(e); + } + } + }); + } +} diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java index ea71dc152f9..5659b2df1c8 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java @@ -37,10 +37,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.Streams; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.file.NoSuchFileException; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; @@ -89,7 +89,7 @@ public class S3BlobContainer extends AbstractBlobContainer { } else { if (e instanceof AmazonS3Exception) { if (404 == ((AmazonS3Exception) e).getStatusCode()) { - throw new FileNotFoundException("Blob object [" + blobName + "] not found: " + e.getMessage()); + throw new NoSuchFileException("Blob object [" + blobName + "] not found: " + e.getMessage()); } } throw e; @@ -115,6 +115,10 @@ public class S3BlobContainer extends AbstractBlobContainer { @Override public void deleteBlob(String blobName) throws IOException { + if (!blobExists(blobName)) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); + } + try { blobStore.client().deleteObject(blobStore.bucket(), buildKey(blobName)); } catch (AmazonClientException e) { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java new file mode 100644 index 00000000000..8124f693943 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/MockAmazonS3.java @@ -0,0 +1,200 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.cloud.aws.blobstore;
+
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.services.s3.AbstractAmazonS3;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.CopyObjectRequest;
+import com.amazonaws.services.s3.model.CopyObjectResult;
+import com.amazonaws.services.s3.model.DeleteObjectRequest;
+import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ListObjectsRequest;
+import com.amazonaws.services.s3.model.ObjectListing;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.PutObjectResult;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import com.amazonaws.services.s3.model.S3ObjectSummary;
+import com.amazonaws.util.Base64;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.DigestInputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+class MockAmazonS3 extends AbstractAmazonS3 {
+
+    private Map<String, InputStream> blobs = new ConcurrentHashMap<>();
+
+    // in ESBlobStoreContainerTestCase.java, the maximum
+    // length of the input data is 100 bytes
+    private byte[] byteCounter = new byte[100];
+
+    @Override
+    public boolean doesBucketExist(String bucket) {
+        return true;
+    }
+
+    @Override
+    public ObjectMetadata getObjectMetadata(
+            GetObjectMetadataRequest getObjectMetadataRequest)
+            throws AmazonClientException, AmazonServiceException {
+        String blobName = getObjectMetadataRequest.getKey();
+
+        if (!blobs.containsKey(blobName)) {
+            throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+        }
+
+        return new ObjectMetadata(); // nothing is done with it
+    }
+
+    @Override
+    public PutObjectResult putObject(PutObjectRequest putObjectRequest)
+            throws AmazonClientException, AmazonServiceException {
+        String blobName = putObjectRequest.getKey();
+        DigestInputStream stream = (DigestInputStream) putObjectRequest.getInputStream();
+
+        if (blobs.containsKey(blobName)) {
+            throw new AmazonS3Exception("[" + blobName + "] already exists.");
+        }
+
+        blobs.put(blobName, stream);
+
+        // input and output md5 hashes need to match to avoid an exception
+        String md5 = Base64.encodeAsString(stream.getMessageDigest().digest());
+        PutObjectResult result = new PutObjectResult();
+        result.setContentMd5(md5);
+
+        return result;
+    }
+
+    @Override
+    public S3Object getObject(GetObjectRequest getObjectRequest)
+            throws AmazonClientException, AmazonServiceException {
+        // in ESBlobStoreContainerTestCase.java, the prefix is empty,
+        // so the key and blobName are equivalent to each other
+        String blobName = getObjectRequest.getKey();
+
+        if (!blobs.containsKey(blobName)) {
+            throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+        }
+
+        // the HTTP request attribute is irrelevant for reading
+        S3ObjectInputStream stream = new S3ObjectInputStream(
+                blobs.get(blobName), null, false);
+        S3Object s3Object = new S3Object();
+        s3Object.setObjectContent(stream);
+        return s3Object;
+    }
+
+    @Override
+    public ObjectListing listObjects(ListObjectsRequest listObjectsRequest)
+            throws AmazonClientException, AmazonServiceException {
+        MockObjectListing list = new MockObjectListing();
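+        // a single page of results suffices for these tests, so the listing is never truncated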
+        list.setTruncated(false);
+
+        String blobName;
+        String prefix = listObjectsRequest.getPrefix();
+
+        ArrayList<S3ObjectSummary> mockObjectSummaries = new ArrayList<>();
+
+        for (Map.Entry<String, InputStream> blob : blobs.entrySet()) {
+            blobName = blob.getKey();
+            S3ObjectSummary objectSummary = new S3ObjectSummary();
+
+            if (prefix.isEmpty() || blobName.startsWith(prefix)) {
+                objectSummary.setKey(blobName);
+
+                try {
+                    objectSummary.setSize(getSize(blob.getValue()));
+                } catch (IOException e) {
+                    throw new AmazonS3Exception("Object listing " +
+                            "failed for blob [" + blob.getKey() + "]");
+                }
+
+                mockObjectSummaries.add(objectSummary);
+            }
+        }
+
+        list.setObjectSummaries(mockObjectSummaries);
+        return list;
+    }
+
+    @Override
+    public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest)
+            throws AmazonClientException, AmazonServiceException {
+        String sourceBlobName = copyObjectRequest.getSourceKey();
+        String targetBlobName = copyObjectRequest.getDestinationKey();
+
+        if (!blobs.containsKey(sourceBlobName)) {
+            throw new AmazonS3Exception("Source blob [" +
+                    sourceBlobName + "] does not exist.");
+        }
+
+        if (blobs.containsKey(targetBlobName)) {
+            throw new AmazonS3Exception("Target blob [" +
+                    targetBlobName + "] already exists.");
+        }
+
+        blobs.put(targetBlobName, blobs.get(sourceBlobName));
+        return new CopyObjectResult(); // nothing is done with it
+    }
+
+    @Override
+    public void deleteObject(DeleteObjectRequest deleteObjectRequest)
+            throws AmazonClientException, AmazonServiceException {
+        String blobName = deleteObjectRequest.getKey();
+
+        if (!blobs.containsKey(blobName)) {
+            throw new AmazonS3Exception("[" + blobName + "] does not exist.");
+        }
+
+        blobs.remove(blobName);
+    }
+
+    private int getSize(InputStream stream) throws IOException {
+        int size = stream.read(byteCounter);
+        stream.reset(); // in case we ever need the size again
+        return size;
+    }
+
+    private class MockObjectListing extends ObjectListing {
+        // the objectSummaries attribute in ObjectListing.java
+        // is read-only, but we need to be able to write to it,
+        // so we create a mock of it to work around this
+        private List<S3ObjectSummary> mockObjectSummaries;
+
+        @Override
+        public List<S3ObjectSummary> getObjectSummaries() {
+            return mockObjectSummaries;
+        }
+
+        private void setObjectSummaries(List<S3ObjectSummary> objectSummaries) {
+            mockObjectSummaries = objectSummaries;
+        }
+    }
+}
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java
new file mode 100644
index 00000000000..bca1c1d8a18
--- /dev/null
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cloud.aws.blobstore;
+
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
+
+import java.io.IOException;
+import java.util.Locale;
+
+public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+    protected BlobStore newBlobStore() throws IOException {
+        MockAmazonS3 client = new MockAmazonS3();
+        String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+
+        return new S3BlobStore(Settings.EMPTY, client, bucket, null, false,
+                new ByteSizeValue(10, ByteSizeUnit.MB), 5, "public-read-write", "standard");
+    }
+}
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yaml
similarity index 78%
rename from rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml
rename to rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yaml
index 7dbc84a5078..d01e88be8ad 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_operation.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yaml
@@ -1,5 +1,5 @@
 ---
-"Delete operation field":
+"Delete result field":
 
   - do:
       index:
@@ -14,7 +14,7 @@
           type: test
          id: 1
 
-  - match: { _operation: delete }
+  - match: { result: deleted }
 
   - do:
       catch: missing
@@ -23,4 +23,4 @@
           type: test
          id: 1
 
-  - match: { _operation: noop }
+  - match: { result: not_found }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yaml
similarity index 75%
rename from rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml
rename to rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yaml
index a935bda420d..45ebe0bbd3d 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_operation.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yaml
@@ -1,5 +1,5 @@
 ---
-"Index operation field":
+"Index result field":
 
   - do:
       index:
@@ -8,7 +8,7 @@
          id: 1
          body: { foo: bar }
 
-  - match: { _operation: create }
+  - match: { result: created }
 
   - do:
       index:
@@ -18,4 +18,4 @@
          body: { foo: bar }
          op_type: index
 
-  - match: { _operation: index }
+  - match: { result: updated }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yaml
similarity index 86%
rename from rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml
rename to rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yaml
index abbb8d4a59a..9adada6d54b 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_operation.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yaml
@@ -1,5 +1,5 @@
 ---
-"Update operation field":
+"Update result field":
 
   - do:
       update:
@@ -11,7 +11,7 @@
          doc_as_upsert: true
 
   - match: { _version: 1 }
-  - match: { _operation: create }
+  - match: { result: created }
 
   - do:
       update:
@@ -23,7 +23,7 @@
          doc_as_upsert: true
 
   - match: { _version: 1 }
-  - match: { _operation: noop }
+  - match: { result: noop }
 
   - do:
       update:
@@ -36,7 +36,7 @@
          detect_noop: false
 
   - match: { _version: 2 }
-  - match: { _operation: index }
+  - match: { result: updated }
 
   - do:
       update:
@@ -49,4 +49,4 @@
          detect_noop: true
 
   - match: { _version: 3 }
-  - match: { _operation: index }
+  - match: { result: updated }
diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
index 6ff0b71cdcc..aedbc946d74 100644
--- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java
@@ -112,17 +112,34 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
         }
     }
 
+    public void testDeleteBlob() throws IOException {
+        try (final BlobStore store = newBlobStore()) {
+            final String blobName = "foobar";
+            final BlobContainer container = store.blobContainer(new BlobPath());
+            expectThrows(IOException.class, () -> container.deleteBlob(blobName));
+
+            byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+            final BytesArray bytesArray = new BytesArray(data);
+            container.writeBlob(blobName, bytesArray);
+            container.deleteBlob(blobName); // should not raise
+
+            // blob deleted, so should raise again
+            expectThrows(IOException.class, () -> container.deleteBlob(blobName));
+        }
+    }
+
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/15579")
-    public void testOverwriteFails() throws IOException {
+    public void testVerifyOverwriteFails() throws IOException {
         try (final BlobStore store = newBlobStore()) {
             final String blobName = "foobar";
             final BlobContainer container = store.blobContainer(new BlobPath());
             byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
             final BytesArray bytesArray = new BytesArray(data);
             container.writeBlob(blobName, bytesArray);
+            // should not be able to overwrite an existing blob
             expectThrows(IOException.class, () -> container.writeBlob(blobName, bytesArray));
             container.deleteBlob(blobName);
-            container.writeBlob(blobName, bytesArray); // deleted it, so should be able to write it again
+            container.writeBlob(blobName, bytesArray); // after deleting the previous blob, we should be able to write it again
         }
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
index b6fddaa427c..e036676677f 100644
--- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
@@ -34,6 +34,15 @@ import java.util.function.Function;
 
 /**
  * A mocked script engine that can be used for testing purpose.
+ *
+ * This script engine allows one to define a set of predefined scripts, each of which is basically a combination of
+ * a key and a function:
+ *
+ * The key can be anything as long as it is a {@link String} and is used to resolve the scripts
+ * at compilation time. For inline scripts, the key can be a description of the script. For stored and file scripts,
+ * the source must match a key in the predefined set of scripts.
+ *
+ * The function is used to provide the result of the script execution and can return anything.
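+ *
+ * For example, a test can register a script under the key "1" that simply returns the string "1",
+ * mirroring the setup used in ESTestCase#newTestScriptModule() below (an illustrative sketch, not
+ * the only possible wiring):
+ *
+ * <pre>{@code
+ * MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME,
+ *         Collections.singletonMap("1", script -> "1"));
+ * }</pre>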
 */
public class MockScriptEngine implements ScriptEngineService {
@@ -63,7 +72,13 @@ public class MockScriptEngine implements ScriptEngineService {
 
     @Override
     public Object compile(String name, String source, Map<String, String> params) {
+        // Scripts are always resolved using the script's source. For inline scripts, it's easy because they don't have names and the
+        // source is always provided. For stored and file scripts, the source of the script must match the key of a predefined script.
         Function<Map<String, Object>, Object> script = scripts.get(source);
+        if (script == null) {
+            throw new IllegalArgumentException("No predefined script matching [" + source + "] for script with name [" + name + "], " +
+                    "did you declare the mocked script?");
+        }
         return new MockCompiledScript(name, params, source, script);
     }
 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
index d864dc732c0..7dc358693fa 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java
@@ -21,7 +21,6 @@ package org.elasticsearch.test;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.io.JsonStringEncoder;
-
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -94,6 +93,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer;
 import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.PluginsService;
+import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
@@ -861,7 +861,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                     new Class[]{Client.class}, clientInvocationHandler);
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
 
-            ScriptModule scriptModule = newTestScriptModule();
+            ScriptModule scriptModule = createScriptModule(pluginsService.filterPlugins(ScriptPlugin.class));
             List<Setting<?>> scriptSettings = scriptModule.getSettings();
             scriptSettings.addAll(pluginsService.getPluginSettings());
             scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
@@ -970,6 +970,20 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                     scriptService, indicesQueriesRegistry, client, null, state);
         }
 
+        ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) {
+            if (scriptPlugins == null || scriptPlugins.isEmpty()) {
+                return newTestScriptModule();
+            }
+
+            Settings settings = Settings.builder()
+                    .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                    // no file watching, so we don't need a ResourceWatcherService
+                    .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
+                    .build();
+            Environment environment = new Environment(settings);
+            return ScriptModule.create(settings, environment, null, scriptPlugins);
+        }
+
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
index 672f4612cc6..2c1de155534 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
@@ -1382,8 +1382,8 @@ public abstract class ESIntegTestCase extends ESTestCase {
             // delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs!
             for (Tuple<String, String> doc : bogusIds) {
                 assertEquals("failed to delete a dummy doc [" + doc.v1() + "][" + doc.v2() + "]",
-                    DocWriteResponse.Operation.DELETE,
-                    client().prepareDelete(doc.v1(), RANDOM_BOGUS_TYPE, doc.v2()).get().getOperation());
+                    DocWriteResponse.Result.DELETED,
+                    client().prepareDelete(doc.v1(), RANDOM_BOGUS_TYPE, doc.v2()).get().getResult());
             }
         }
         if (forceRefresh) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index c9ecf2ec639..95fd4186ed0 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -835,7 +835,8 @@ public abstract class ESTestCase extends LuceneTestCase {
                 .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
                 .build();
         Environment environment = new Environment(settings);
-        return new ScriptModule(settings, environment, null, singletonList(new MockScriptEngine()), emptyList());
+        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1"));
+        return new ScriptModule(settings, environment, null, singletonList(scriptEngine), emptyList());
     }
 
     /** Creates an IndicesModule for testing with the given mappers and metadata mappers. */
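
A minimal sketch of how calling code adapts to the Operation-to-Result rename above (illustrative only: the index name, type, id, and field values are hypothetical, and a connected transport `client` is assumed to be in scope, as in the test framework code above):

    // index a document, then branch on the renamed result enum
    IndexResponse response = client.prepareIndex("test", "test", "1").setSource("foo", "bar").get();
    if (response.getResult() == DocWriteResponse.Result.CREATED) {
        // no document with that id existed, so a new one was created
    } else if (response.getResult() == DocWriteResponse.Result.UPDATED) {
        // an existing document was overwritten and its _version incremented
    }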