Merge branch 'master' into feature/rank-eval
commit 0188363a40
buildSrc/src/main/resources/checkstyle_suppressions.xml

@@ -1084,13 +1084,6 @@
   <suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
   <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyPlugin.java" checks="LineLength" />
   <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]IPv4RangeTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]IndexLookupTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]RandomScoreFunctionTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]ScriptedMetricTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]SearchFieldsTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]SimpleSortTests.java" checks="LineLength" />
-  <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]package-info.java" checks="LineLength" />
   <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptTests.java" checks="LineLength" />
   <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovySecurityTests.java" checks="LineLength" />
   <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolateRequest.java" checks="LineLength" />
DocWriteResponse.java

@@ -40,16 +40,21 @@ import java.util.Locale;
  */
 public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContent {
 
-    public enum Operation implements Writeable {
-        CREATE(0),
-        INDEX(1),
-        DELETE(2),
-        NOOP(3);
+    /**
+     * An enum that represents the results of CRUD operations, primarily used to communicate the type of
+     * operation that occurred.
+     */
+    public enum Result implements Writeable {
+        CREATED(0),
+        UPDATED(1),
+        DELETED(2),
+        NOT_FOUND(3),
+        NOOP(4);
 
         private final byte op;
         private final String lowercase;
 
-        Operation(int op) {
+        Result(int op) {
             this.op = (byte) op;
             this.lowercase = this.toString().toLowerCase(Locale.ENGLISH);
         }
@@ -62,19 +67,21 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
             return lowercase;
         }
 
-        public static Operation readFrom(StreamInput in) throws IOException{
+        public static Result readFrom(StreamInput in) throws IOException{
             Byte opcode = in.readByte();
             switch(opcode){
                 case 0:
-                    return CREATE;
+                    return CREATED;
                 case 1:
-                    return INDEX;
+                    return UPDATED;
                 case 2:
-                    return DELETE;
+                    return DELETED;
                 case 3:
-                    return NOOP;
+                    return NOT_FOUND;
+                case 4:
+                    return NOOP;
                 default:
-                    throw new IllegalArgumentException("Unknown operation code: " + opcode);
+                    throw new IllegalArgumentException("Unknown result code: " + opcode);
             }
         }
@@ -89,14 +96,14 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
     private String type;
     private long version;
     private boolean forcedRefresh;
-    protected Operation operation;
+    protected Result result;
 
-    public DocWriteResponse(ShardId shardId, String type, String id, long version, Operation operation) {
+    public DocWriteResponse(ShardId shardId, String type, String id, long version, Result result) {
         this.shardId = shardId;
         this.type = type;
         this.id = id;
         this.version = version;
-        this.operation = operation;
+        this.result = result;
     }
 
     // needed for deserialization
@@ -106,8 +113,8 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
     /**
      * The change that occurred to the document.
      */
-    public Operation getOperation() {
-        return operation;
+    public Result getResult() {
+        return result;
     }
 
     /**
@@ -198,7 +205,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
         id = in.readString();
         version = in.readZLong();
         forcedRefresh = in.readBoolean();
-        operation = Operation.readFrom(in);
+        result = Result.readFrom(in);
     }
 
     @Override
@@ -209,7 +216,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
         out.writeString(id);
         out.writeZLong(version);
         out.writeBoolean(forcedRefresh);
-        operation.writeTo(out);
+        result.writeTo(out);
     }
 
     @Override
@@ -219,7 +226,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
             .field("_type", type)
             .field("_id", id)
             .field("_version", version)
-            .field("_operation", getOperation().getLowercase());
+            .field("result", getResult().getLowercase());
         if (forcedRefresh) {
            builder.field("forced_refresh", forcedRefresh);
         }
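
Note: with this change the `_operation` response field becomes `result` (e.g. `"result": "created"`), and the enum gains a fifth opcode for `NOOP`. A minimal round-trip sketch for the renamed enum; the stream helper classes and their exact signatures here are assumptions from that era's API, not part of this commit:

    import org.elasticsearch.action.DocWriteResponse;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class ResultWireCheck {
        public static void main(String[] args) throws Exception {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                DocWriteResponse.Result.NOT_FOUND.writeTo(out);            // serializes opcode 3
                StreamInput in = StreamInput.wrap(out.bytes().toBytes());  // assumed helper
                assert DocWriteResponse.Result.readFrom(in) == DocWriteResponse.Result.NOT_FOUND;
            }
        }
    }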
TransportShardBulkAction.java

@@ -239,16 +239,16 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
             if (updateResult.writeResult != null) {
                 location = locationToSync(location, updateResult.writeResult.getLocation());
             }
-            switch (updateResult.result.operation()) {
-                case CREATE:
-                case INDEX:
+            switch (updateResult.result.getResponseResult()) {
+                case CREATED:
+                case UPDATED:
                     @SuppressWarnings("unchecked")
                     WriteResult<IndexResponse> result = updateResult.writeResult;
                     IndexRequest indexRequest = updateResult.request();
                     BytesReference indexSourceAsBytes = indexRequest.source();
                     // add the response
                     IndexResponse indexResponse = result.getResponse();
-                    UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getOperation());
+                    UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getResult());
                     if (updateRequest.fields() != null && updateRequest.fields().length > 0) {
                         Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
                         updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
@@ -256,12 +256,12 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                     item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest);
                     setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
                     break;
-                case DELETE:
+                case DELETED:
                     @SuppressWarnings("unchecked")
                     WriteResult<DeleteResponse> writeResult = updateResult.writeResult;
                     DeleteResponse response = writeResult.getResponse();
                     DeleteRequest deleteRequest = updateResult.request();
-                    updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
+                    updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
                     updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
                     // Replace the update request to the translated delete request to execute on the replica.
                     item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest);
@@ -271,6 +271,8 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                     setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResult.noopResult));
                     item.setIgnoreOnReplica(); // no need to go to the replica
                     break;
+                default:
+                    throw new IllegalStateException("Illegal operation " + updateResult.result.getResponseResult());
             }
             // NOTE: Breaking out of the retry_on_conflict loop!
             break;
@@ -299,20 +301,22 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                 } else if (updateResult.result == null) {
                     setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, new BulkItemResponse.Failure(request.index(), updateRequest.type(), updateRequest.id(), e)));
                 } else {
-                    switch (updateResult.result.operation()) {
-                        case CREATE:
-                        case INDEX:
+                    switch (updateResult.result.getResponseResult()) {
+                        case CREATED:
+                        case UPDATED:
                             IndexRequest indexRequest = updateResult.request();
                             logFailure(e, "index", request.shardId(), indexRequest);
                             setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE,
                                     new BulkItemResponse.Failure(request.index(), indexRequest.type(), indexRequest.id(), e)));
                             break;
-                        case DELETE:
+                        case DELETED:
                             DeleteRequest deleteRequest = updateResult.request();
                             logFailure(e, "delete", request.shardId(), deleteRequest);
                             setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE,
                                     new BulkItemResponse.Failure(request.index(), deleteRequest.type(), deleteRequest.id(), e)));
                             break;
+                        default:
+                            throw new IllegalStateException("Illegal operation " + updateResult.result.getResponseResult());
                     }
                 }
                 // NOTE: Breaking out of the retry_on_conflict loop!
@@ -399,9 +403,9 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
 
     private UpdateResult shardUpdateOperation(IndexMetaData metaData, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard) {
         UpdateHelper.Result translate = updateHelper.prepare(updateRequest, indexShard);
-        switch (translate.operation()) {
-            case CREATE:
-            case INDEX:
+        switch (translate.getResponseResult()) {
+            case CREATED:
+            case UPDATED:
                 IndexRequest indexRequest = translate.action();
                 try {
                     WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, metaData, indexShard, false);
@@ -414,7 +418,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                 }
                 return new UpdateResult(translate, indexRequest, retry, cause, null);
             }
-            case DELETE:
+            case DELETED:
                 DeleteRequest deleteRequest = translate.action();
                 try {
                     WriteResult<DeleteResponse> result = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard);
@@ -432,7 +436,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                 indexShard.noopUpdate(updateRequest.type());
                 return new UpdateResult(translate, updateResponse);
             default:
-                throw new IllegalStateException("Illegal update operation " + translate.operation());
+                throw new IllegalStateException("Illegal update operation " + translate.getResponseResult());
         }
     }
 
DeleteResponse.java

@@ -39,17 +39,17 @@ public class DeleteResponse extends DocWriteResponse {
     }
 
     public DeleteResponse(ShardId shardId, String type, String id, long version, boolean found) {
-        super(shardId, type, id, version, found ? Operation.DELETE : Operation.NOOP);
+        super(shardId, type, id, version, found ? Result.DELETED : Result.NOT_FOUND);
     }
 
     @Override
     public RestStatus status() {
-        return operation == Operation.DELETE ? super.status() : RestStatus.NOT_FOUND;
+        return result == Result.DELETED ? super.status() : RestStatus.NOT_FOUND;
     }
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.field("found", operation == Operation.DELETE);
+        builder.field("found", result == Result.DELETED);
         super.toXContent(builder, params);
         return builder;
     }
@@ -62,7 +62,7 @@ public class DeleteResponse extends DocWriteResponse {
         builder.append(",type=").append(getType());
         builder.append(",id=").append(getId());
         builder.append(",version=").append(getVersion());
-        builder.append(",operation=").append(getOperation().getLowercase());
+        builder.append(",result=").append(getResult().getLowercase());
         builder.append(",shards=").append(getShardInfo());
         return builder.append("]").toString();
     }
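
Note: for deletes, the boolean `found` now maps onto the enum rather than onto `Operation.DELETE`/`NOOP`, and a miss becomes an explicit `NOT_FOUND`. A hedged sketch of the resulting status behavior (constructor shape from the hunk above; the ShardId value is illustrative):

    import org.elasticsearch.action.delete.DeleteResponse;
    import org.elasticsearch.index.shard.ShardId;
    import org.elasticsearch.rest.RestStatus;

    class DeleteStatusDemo {
        static void demo() {
            ShardId shardId = new ShardId("twitter", "_na_", 0);
            // found=true -> Result.DELETED, status defers to super.status()
            DeleteResponse deleted = new DeleteResponse(shardId, "tweet", "1", 2L, true);
            // found=false -> Result.NOT_FOUND, which short-circuits to 404
            DeleteResponse missing = new DeleteResponse(shardId, "tweet", "1", 1L, false);
            assert missing.status() == RestStatus.NOT_FOUND;
            assert deleted.status() != RestStatus.NOT_FOUND;
        }
    }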
IndexResponse.java

@@ -39,12 +39,12 @@ public class IndexResponse extends DocWriteResponse {
     }
 
     public IndexResponse(ShardId shardId, String type, String id, long version, boolean created) {
-        super(shardId, type, id, version, created ? Operation.CREATE : Operation.INDEX);
+        super(shardId, type, id, version, created ? Result.CREATED : Result.UPDATED);
     }
 
     @Override
     public RestStatus status() {
-        return operation == Operation.CREATE ? RestStatus.CREATED : super.status();
+        return result == Result.CREATED ? RestStatus.CREATED : super.status();
     }
 
     @Override
@@ -55,7 +55,7 @@ public class IndexResponse extends DocWriteResponse {
         builder.append(",type=").append(getType());
         builder.append(",id=").append(getId());
         builder.append(",version=").append(getVersion());
-        builder.append(",operation=").append(getOperation().getLowercase());
+        builder.append(",result=").append(getResult().getLowercase());
         builder.append(",shards=").append(getShardInfo());
         return builder.append("]").toString();
     }
@@ -63,7 +63,7 @@ public class IndexResponse extends DocWriteResponse {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         super.toXContent(builder, params);
-        builder.field("created", operation == Operation.CREATE);
+        builder.field("created", result == Result.CREATED);
         return builder;
     }
 }
TransportUpdateAction.java

@@ -22,7 +22,6 @@ package org.elasticsearch.action.update;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.action.RoutingMissingException;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
@@ -178,15 +177,15 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
         final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
         final IndexShard indexShard = indexService.getShard(shardId.getId());
         final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
-        switch (result.operation()) {
-            case CREATE:
+        switch (result.getResponseResult()) {
+            case CREATED:
                 IndexRequest upsertRequest = result.action();
                 // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                 final BytesReference upsertSourceBytes = upsertRequest.source();
                 indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
                     @Override
                     public void onResponse(IndexResponse response) {
-                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
+                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
                         if (request.fields() != null && request.fields().length > 0) {
                             Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                             update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
@@ -217,14 +216,14 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
                     }
                 });
                 break;
-            case INDEX:
+            case UPDATED:
                 IndexRequest indexRequest = result.action();
                 // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                 final BytesReference indexSourceBytes = indexRequest.source();
                 indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                     @Override
                     public void onResponse(IndexResponse response) {
-                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
+                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
                         update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
                         update.setForcedRefresh(response.forcedRefresh());
                         listener.onResponse(update);
@@ -248,12 +247,12 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
                     }
                 });
                 break;
-            case DELETE:
+            case DELETED:
                 DeleteRequest deleteRequest = result.action();
                 deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
                     @Override
                     public void onResponse(DeleteResponse response) {
-                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
+                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
                         update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
                         update.setForcedRefresh(response.forcedRefresh());
                         listener.onResponse(update);
@@ -289,7 +288,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
                 listener.onResponse(update);
                 break;
             default:
-                throw new IllegalStateException("Illegal operation " + result.operation());
+                throw new IllegalStateException("Illegal result " + result.getResponseResult());
         }
     }
 }
UpdateHelper.java

@@ -117,9 +117,9 @@ public class UpdateHelper extends AbstractComponent {
                         request.script.getScript());
             }
             UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
-                    getResult.getVersion(), DocWriteResponse.Operation.NOOP);
+                    getResult.getVersion(), DocWriteResponse.Result.NOOP);
             update.setGetResult(getResult);
-            return new Result(update, DocWriteResponse.Operation.NOOP, upsertDoc, XContentType.JSON);
+            return new Result(update, DocWriteResponse.Result.NOOP, upsertDoc, XContentType.JSON);
         }
         indexRequest.source((Map) ctx.get("_source"));
     }
@@ -136,7 +136,7 @@ public class UpdateHelper extends AbstractComponent {
             // in all but the internal versioning mode, we want to create the new document using the given version.
             indexRequest.version(request.version()).versionType(request.versionType());
         }
-        return new Result(indexRequest, DocWriteResponse.Operation.CREATE, null, null);
+        return new Result(indexRequest, DocWriteResponse.Result.CREATED, null, null);
     }
 
     long updateVersion = getResult.getVersion();
@@ -227,21 +227,21 @@ public class UpdateHelper extends AbstractComponent {
                     .consistencyLevel(request.consistencyLevel())
                     .timestamp(timestamp).ttl(ttl)
                     .setRefreshPolicy(request.getRefreshPolicy());
-            return new Result(indexRequest, DocWriteResponse.Operation.INDEX, updatedSourceAsMap, updateSourceContentType);
+            return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType);
         } else if ("delete".equals(operation)) {
             DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent)
                     .version(updateVersion).versionType(request.versionType())
                     .consistencyLevel(request.consistencyLevel())
                     .setRefreshPolicy(request.getRefreshPolicy());
-            return new Result(deleteRequest, DocWriteResponse.Operation.DELETE, updatedSourceAsMap, updateSourceContentType);
+            return new Result(deleteRequest, DocWriteResponse.Result.DELETED, updatedSourceAsMap, updateSourceContentType);
         } else if ("none".equals(operation)) {
-            UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Operation.NOOP);
+            UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP);
             update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
-            return new Result(update, DocWriteResponse.Operation.NOOP, updatedSourceAsMap, updateSourceContentType);
+            return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
         } else {
             logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript());
-            UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Operation.NOOP);
-            return new Result(update, DocWriteResponse.Operation.NOOP, updatedSourceAsMap, updateSourceContentType);
+            UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP);
+            return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
         }
     }
 
@@ -310,13 +310,13 @@ public class UpdateHelper extends AbstractComponent {
     public static class Result {
 
         private final Streamable action;
-        private final DocWriteResponse.Operation operation;
+        private final DocWriteResponse.Result result;
         private final Map<String, Object> updatedSourceAsMap;
         private final XContentType updateSourceContentType;
 
-        public Result(Streamable action, DocWriteResponse.Operation operation, Map<String, Object> updatedSourceAsMap, XContentType updateSourceContentType) {
+        public Result(Streamable action, DocWriteResponse.Result result, Map<String, Object> updatedSourceAsMap, XContentType updateSourceContentType) {
             this.action = action;
-            this.operation = operation;
+            this.result = result;
             this.updatedSourceAsMap = updatedSourceAsMap;
             this.updateSourceContentType = updateSourceContentType;
         }
@@ -326,8 +326,8 @@ public class UpdateHelper extends AbstractComponent {
         return (T) action;
     }
 
-    public DocWriteResponse.Operation operation() {
-        return operation;
+    public DocWriteResponse.Result getResponseResult() {
+        return result;
     }
 
     public Map<String, Object> updatedSourceAsMap() {
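
Note: for orientation, a hedged summary of how `prepare()` maps a scripted update onto the new `Result` values, mirroring the branches shown above (the helper name is ours, not in the commit):

    import org.elasticsearch.action.DocWriteResponse;

    class ScriptOpMapping {
        // "index" and "delete" translate into write results; "none" and any
        // unrecognized op fall through to a NOOP response. Upserting a missing
        // document is handled by a separate branch that returns CREATED.
        static DocWriteResponse.Result resultForScriptOp(String op) {
            switch (op) {
                case "index":  return DocWriteResponse.Result.UPDATED;
                case "delete": return DocWriteResponse.Result.DELETED;
                default:       return DocWriteResponse.Result.NOOP;
            }
        }
    }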
UpdateResponse.java

@@ -40,13 +40,13 @@ public class UpdateResponse extends DocWriteResponse {
     * Constructor to be used when an update didn't translate into a write.
     * For example: update script with operation set to none
     */
-    public UpdateResponse(ShardId shardId, String type, String id, long version, Operation operation) {
-        this(new ShardInfo(0, 0), shardId, type, id, version, operation);
+    public UpdateResponse(ShardId shardId, String type, String id, long version, Result result) {
+        this(new ShardInfo(0, 0), shardId, type, id, version, result);
     }
 
     public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id,
-                          long version, Operation operation) {
-        super(shardId, type, id, version, operation);
+                          long version, Result result) {
+        super(shardId, type, id, version, result);
         setShardInfo(shardInfo);
     }
 
@@ -60,7 +60,7 @@ public class UpdateResponse extends DocWriteResponse {
 
     @Override
     public RestStatus status() {
-        return this.operation == Operation.CREATE ? RestStatus.CREATED : super.status();
+        return this.result == Result.CREATED ? RestStatus.CREATED : super.status();
     }
 
     @Override
@@ -106,7 +106,7 @@ public class UpdateResponse extends DocWriteResponse {
         builder.append(",type=").append(getType());
         builder.append(",id=").append(getId());
         builder.append(",version=").append(getVersion());
-        builder.append(",operation=").append(getOperation().getLowercase());
+        builder.append(",result=").append(getResult().getLowercase());
         builder.append(",shards=").append(getShardInfo());
         return builder.append("]").toString();
     }
SnapshotsInProgress.java

@@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.snapshots.Snapshot;
 
 import java.io.IOException;
@@ -70,12 +71,12 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
         private final boolean includeGlobalState;
         private final boolean partial;
         private final ImmutableOpenMap<ShardId, ShardSnapshotStatus> shards;
-        private final List<String> indices;
+        private final List<IndexId> indices;
         private final ImmutableOpenMap<String, List<ShardId>> waitingIndices;
         private final long startTime;
 
-        public Entry(Snapshot snapshot, boolean includeGlobalState, boolean partial, State state, List<String> indices, long startTime,
-                     ImmutableOpenMap<ShardId, ShardSnapshotStatus> shards) {
+        public Entry(Snapshot snapshot, boolean includeGlobalState, boolean partial, State state, List<IndexId> indices,
+                     long startTime, ImmutableOpenMap<ShardId, ShardSnapshotStatus> shards) {
             this.state = state;
             this.snapshot = snapshot;
             this.includeGlobalState = includeGlobalState;
@@ -111,7 +112,7 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             return state;
         }
 
-        public List<String> indices() {
+        public List<IndexId> indices() {
             return indices;
         }
 
@@ -377,9 +378,9 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             boolean partial = in.readBoolean();
             State state = State.fromValue(in.readByte());
             int indices = in.readVInt();
-            List<String> indexBuilder = new ArrayList<>();
+            List<IndexId> indexBuilder = new ArrayList<>();
             for (int j = 0; j < indices; j++) {
-                indexBuilder.add(in.readString());
+                indexBuilder.add(new IndexId(in.readString(), in.readString()));
             }
             long startTime = in.readLong();
             ImmutableOpenMap.Builder<ShardId, ShardSnapshotStatus> builder = ImmutableOpenMap.builder();
@@ -410,8 +411,8 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             out.writeBoolean(entry.partial());
             out.writeByte(entry.state().value());
             out.writeVInt(entry.indices().size());
-            for (String index : entry.indices()) {
-                out.writeString(index);
+            for (IndexId index : entry.indices()) {
+                index.writeTo(out);
             }
             out.writeLong(entry.startTime());
             out.writeVInt(entry.shards().size());
@@ -458,8 +459,8 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
             builder.field(STATE, entry.state());
             builder.startArray(INDICES);
             {
-                for (String index : entry.indices()) {
-                    builder.value(index);
+                for (IndexId index : entry.indices()) {
+                    index.toXContent(builder, params);
                 }
             }
             builder.endArray();
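
Note: each in-progress snapshot entry now serializes two strings per index (name, then repository-scoped id) instead of one. A read-side sketch that mirrors the new wire format from the hunk above (the standalone helper is ours):

    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.repositories.IndexId;

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    class EntryWireFormat {
        static List<IndexId> readIndexIds(StreamInput in) throws IOException {
            int count = in.readVInt();
            List<IndexId> indices = new ArrayList<>(count);
            for (int j = 0; j < count; j++) {
                indices.add(new IndexId(in.readString(), in.readString())); // name, then id
            }
            return indices;
        }
    }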
BlobContainer.java

@@ -23,7 +23,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.Collection;
+import java.nio.file.NoSuchFileException;
 import java.util.Map;
 
 /**
@@ -53,7 +53,8 @@ public interface BlobContainer {
     * @param   blobName
     *          The name of the blob to get an {@link InputStream} for.
     * @return  The {@code InputStream} to read the blob.
-    * @throws  IOException if the blob does not exist or can not be read.
+    * @throws  NoSuchFileException if the blob does not exist
+    * @throws  IOException if the blob can not be read.
     */
    InputStream readBlob(String blobName) throws IOException;
 
@@ -95,7 +96,8 @@ public interface BlobContainer {
     *
     * @param   blobName
     *          The name of the blob to delete.
-    * @throws  IOException if the blob does not exist, or if the blob exists but could not be deleted.
+    * @throws  NoSuchFileException if the blob does not exist
+    * @throws  IOException if the blob exists but could not be deleted.
     */
    void deleteBlob(String blobName) throws IOException;
 
FsBlobContainer.java

@@ -27,13 +27,16 @@ import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
 import org.elasticsearch.common.io.Streams;
 
 import java.io.BufferedInputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
+import java.nio.file.StandardOpenOption;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.HashMap;
 import java.util.Map;
@@ -85,7 +88,7 @@ public class FsBlobContainer extends AbstractBlobContainer {
     @Override
     public void deleteBlob(String blobName) throws IOException {
         Path blobPath = path.resolve(blobName);
-        Files.deleteIfExists(blobPath);
+        Files.delete(blobPath);
     }
 
     @Override
@@ -95,14 +98,18 @@ public class FsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public InputStream readBlob(String name) throws IOException {
-        return new BufferedInputStream(Files.newInputStream(path.resolve(name)), blobStore.bufferSizeInBytes());
+        final Path resolvedPath = path.resolve(name);
+        try {
+            return new BufferedInputStream(Files.newInputStream(resolvedPath), blobStore.bufferSizeInBytes());
+        } catch (FileNotFoundException fnfe) {
+            throw new NoSuchFileException("[" + name + "] blob not found");
+        }
     }
 
     @Override
     public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
         final Path file = path.resolve(blobName);
-        // TODO: why is this not specifying CREATE_NEW? Do we really need to be able to truncate existing files?
-        try (OutputStream outputStream = Files.newOutputStream(file)) {
+        try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) {
             Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]);
         }
         IOUtils.fsync(file, false);
AbstractBlobContainer.java

@@ -20,14 +20,11 @@
 package org.elasticsearch.common.blobstore.support;
 
 import org.elasticsearch.common.blobstore.BlobContainer;
-import org.elasticsearch.common.blobstore.BlobMetaData;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.bytes.BytesReference;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.Collection;
-import java.util.Map;
 
 /**
  * A base abstract blob container that implements higher level container methods.
CancellableThreads.java

@@ -30,11 +30,14 @@ import java.util.Set;
 /**
  * A utility class for multi threaded operation that needs to be cancellable via interrupts. Every cancellable operation should be
  * executed via {@link #execute(Interruptable)}, which will capture the executing thread and make sure it is interrupted in the case
- * cancellation.
+ * of cancellation.
+ *
+ * Cancellation policy: This class does not support external interruption via <code>Thread#interrupt()</code>. Always use #cancel() instead.
  */
 public class CancellableThreads {
     private final Set<Thread> threads = new HashSet<>();
-    private boolean cancelled = false;
+    // needs to be volatile as it is also read outside of synchronized blocks.
+    private volatile boolean cancelled = false;
     private String reason;
 
     public synchronized boolean isCancelled() {
@@ -94,13 +97,18 @@ public class CancellableThreads {
     */
    public void executeIO(IOInterruptable interruptable) throws IOException {
        boolean wasInterrupted = add();
+       boolean cancelledByExternalInterrupt = false;
        RuntimeException runtimeException = null;
        IOException ioException = null;
 
        try {
            interruptable.run();
        } catch (InterruptedException | ThreadInterruptedException e) {
-           // assume this is us and ignore
+           // ignore, this interrupt has been triggered by us in #cancel()...
+           assert cancelled : "Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead";
+           // we can only reach here if assertions are disabled. If we reach this code and cancelled is false, this means that we've
+           // been interrupted externally (which we don't support).
+           cancelledByExternalInterrupt = !cancelled;
        } catch (RuntimeException t) {
            runtimeException = t;
        } catch (IOException e) {
@@ -128,6 +136,12 @@ public class CancellableThreads {
                throw runtimeException;
            }
        }
+       if (cancelledByExternalInterrupt) {
+           // restore interrupt flag to at least adhere to expected behavior
+           Thread.currentThread().interrupt();
+           throw new RuntimeException("Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead");
+       }
+
    }
 
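
Note: a rough usage sketch for the intended cancellation policy, assuming the `execute`/`cancel` API as shown above; the exact exception surfaced on cancellation is simplified here:

    import org.elasticsearch.common.util.CancellableThreads;

    class CancelDemo {
        static void demo() throws InterruptedException {
            CancellableThreads cancellable = new CancellableThreads();
            Thread worker = new Thread(() -> {
                try {
                    cancellable.execute(() -> Thread.sleep(10_000)); // interrupted by cancel()
                } catch (RuntimeException e) {
                    // execute() surfaces the cancellation as a runtime exception
                }
            });
            worker.start();
            cancellable.cancel("shutting down"); // never call worker.interrupt() directly
            worker.join();
        }
    }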
NodeJoinController.java

@@ -281,16 +281,16 @@ public class NodeJoinController extends AbstractComponent {
         Map<DiscoveryNode, ClusterStateTaskListener> tasks = getPendingAsTasks();
         final String source = "zen-disco-elected-as-master ([" + tasks.size() + "] nodes joined)";
 
-        tasks.put(BECOME_MASTER_TASK, joinProcessedListener);
+        tasks.put(BECOME_MASTER_TASK, (source1, e) -> {}); // noop listener, the election finished listener determines result
+        tasks.put(FINISH_ELECTION_TASK, electionFinishedListener);
         clusterService.submitStateUpdateTasks(source, tasks, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor);
     }
 
     public synchronized void closeAndProcessPending(String reason) {
         innerClose();
         Map<DiscoveryNode, ClusterStateTaskListener> tasks = getPendingAsTasks();
-        final String source = "zen-disco-process-pending-joins [" + reason + "]";
-
-        tasks.put(FINISH_ELECTION_NOT_MASTER_TASK, joinProcessedListener);
+        final String source = "zen-disco-election-stop [" + reason + "]";
+        tasks.put(FINISH_ELECTION_TASK, electionFinishedListener);
         clusterService.submitStateUpdateTasks(source, tasks, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor);
     }
 
@@ -327,12 +327,15 @@ public class NodeJoinController extends AbstractComponent {
         }
     }
 
-    private final ClusterStateTaskListener joinProcessedListener = new ClusterStateTaskListener() {
+    private final ClusterStateTaskListener electionFinishedListener = new ClusterStateTaskListener() {
 
         @Override
         public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
-            assert newState.nodes().isLocalNodeElectedMaster() : "should have become a master but isn't " + newState.prettyPrint();
-            onElectedAsMaster(newState);
+            if (newState.nodes().isLocalNodeElectedMaster()) {
+                ElectionContext.this.onElectedAsMaster(newState);
+            } else {
+                onFailure(source, new NotMasterException("election stopped [" + source + "]"));
+            }
         }
 
         @Override
@@ -379,7 +382,9 @@ public class NodeJoinController extends AbstractComponent {
         }
     }
 
-    // a task indicated that the current node should become master, if no current master is known
+    /**
+     * a task indicated that the current node should become master, if no current master is known
+     */
     private static final DiscoveryNode BECOME_MASTER_TASK = new DiscoveryNode("_BECOME_MASTER_TASK_", LocalTransportAddress.buildUnique(),
         Collections.emptyMap(), Collections.emptySet(), Version.CURRENT) {
         @Override
@@ -388,9 +393,11 @@ public class NodeJoinController extends AbstractComponent {
         }
     };
 
-    // a task that is used to process pending joins without explicitly becoming a master and listening to the results
-    // this task is used when election is stop without the local node becoming a master per se (though it might
-    private static final DiscoveryNode FINISH_ELECTION_NOT_MASTER_TASK = new DiscoveryNode("_NOT_MASTER_TASK_",
+    /**
+     * a task that is used to signal the election is stopped and we should process pending joins.
+     * it may be used in combination with {@link #BECOME_MASTER_TASK}
+     */
+    private static final DiscoveryNode FINISH_ELECTION_TASK = new DiscoveryNode("_FINISH_ELECTION_",
         LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.emptySet(), Version.CURRENT) {
         @Override
         public String toString() {
@@ -402,31 +409,35 @@ public class NodeJoinController extends AbstractComponent {
 
         @Override
         public BatchResult<DiscoveryNode> execute(ClusterState currentState, List<DiscoveryNode> joiningNodes) throws Exception {
-            final DiscoveryNodes currentNodes = currentState.nodes();
             final BatchResult.Builder<DiscoveryNode> results = BatchResult.builder();
+
+            final DiscoveryNodes currentNodes = currentState.nodes();
             boolean nodesChanged = false;
             ClusterState.Builder newState = ClusterState.builder(currentState);
             DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes);
 
-            if (currentNodes.getMasterNode() == null && joiningNodes.contains(BECOME_MASTER_TASK)) {
+            if (joiningNodes.size() == 1 && joiningNodes.get(0).equals(FINISH_ELECTION_TASK)) {
+                return results.successes(joiningNodes).build(currentState);
+            } else if (currentNodes.getMasterNode() == null && joiningNodes.contains(BECOME_MASTER_TASK)) {
+                assert joiningNodes.contains(FINISH_ELECTION_TASK) : "becoming a master but election is not finished " + joiningNodes;
                 // use these joins to try and become the master.
                 // Note that we don't have to do any validation of the amount of joining nodes - the commit
                 // during the cluster state publishing guarantees that we have enough
-
                 nodesBuilder.masterNodeId(currentNodes.getLocalNodeId());
                 ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks())
                     .removeGlobalBlock(discoverySettings.getNoMasterBlock()).build();
                 newState.blocks(clusterBlocks);
                 nodesChanged = true;
-            }
-
-            if (nodesBuilder.isLocalNodeElectedMaster() == false) {
+            } else if (nodesBuilder.isLocalNodeElectedMaster() == false) {
                 logger.trace("processing node joins, but we are not the master. current master: {}", currentNodes.getMasterNode());
                 throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request");
             }
 
+            assert nodesBuilder.isLocalNodeElectedMaster();
+
             // processing any joins
             for (final DiscoveryNode node : joiningNodes) {
-                if (node.equals(BECOME_MASTER_TASK) || node.equals(FINISH_ELECTION_NOT_MASTER_TASK)) {
+                if (node.equals(BECOME_MASTER_TASK) || node.equals(FINISH_ELECTION_TASK)) {
                     // noop
                 } else if (currentNodes.nodeExists(node)) {
                     logger.debug("received a join request for an existing node [{}]", node);
StoreRecovery.java

@@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.indices.recovery.RecoveryState;
+import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.Repository;
 
 import java.io.IOException;
@@ -394,10 +395,12 @@ final class StoreRecovery {
             translogState.totalOperationsOnStart(0);
             indexShard.prepareForIndexRecovery();
             ShardId snapshotShardId = shardId;
-            if (!shardId.getIndexName().equals(restoreSource.index())) {
-                snapshotShardId = new ShardId(restoreSource.index(), IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id());
+            final String indexName = restoreSource.index();
+            if (!shardId.getIndexName().equals(indexName)) {
+                snapshotShardId = new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id());
             }
-            repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), snapshotShardId, indexShard.recoveryState());
+            final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName);
+            repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState());
             indexShard.skipTranslogRecovery();
             indexShard.finalizeRecovery();
             indexShard.postRecovery("restore done");
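
Note: the restore path now resolves the repository-scoped `IndexId` by name before the shard restore begins. A condensed sketch of that flow, using only calls that appear in the hunk above (the wrapper method and import paths are our assumptions):

    import org.elasticsearch.cluster.routing.RestoreSource;
    import org.elasticsearch.index.shard.IndexShard;
    import org.elasticsearch.index.shard.ShardId;
    import org.elasticsearch.repositories.IndexId;
    import org.elasticsearch.repositories.Repository;

    class RestorePathSketch {
        static void restore(Repository repository, IndexShard shard, RestoreSource source, ShardId snapshotShardId) {
            // name -> IndexId lookup happens against the repository's own metadata
            IndexId indexId = repository.getRepositoryData().resolveIndexId(source.index());
            repository.restoreShard(shard, source.snapshot().getSnapshotId(), source.version(),
                    indexId, snapshotShardId, shard.recoveryState());
        }
    }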
TranslogReader.java

@@ -113,8 +113,7 @@ public class TranslogReader extends BaseTranslogReader implements Closeable {
             headerStream.read(ref.bytes, ref.offset, ref.length);
             BytesRef uuidBytes = new BytesRef(translogUUID);
             if (uuidBytes.bytesEquals(ref) == false) {
-                throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + "/" + uuidBytes.utf8ToString() +
-                    " but got: " + ref + "/" + ref.utf8ToString() +
+                throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + " but got: " + ref +
                     " this translog file belongs to a different translog. path:" + path);
             }
             return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps);
IndexId.java (new file)

@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;

import java.io.IOException;
import java.util.Objects;

/**
 * Represents a single snapshotted index in the repository.
 */
public final class IndexId implements Writeable, ToXContent {
    protected static final String NAME = "name";
    protected static final String ID = "id";

    private final String name;
    private final String id;

    public IndexId(final String name, final String id) {
        this.name = name;
        this.id = id;
    }

    public IndexId(final StreamInput in) throws IOException {
        this.name = in.readString();
        this.id = in.readString();
    }

    /**
     * The name of the index.
     */
    public String getName() {
        return name;
    }

    /**
     * The unique ID for the index within the repository. This is *not* the same as the
     * index's UUID, but merely a unique file/URL friendly identifier that a repository can
     * use to name blobs for the index.
     *
     * We could not use the index's actual UUID (See {@link Index#getUUID()}) because in the
     * case of snapshot/restore, the index UUID in the snapshotted index will be different
     * from the index UUID assigned to it when it is restored. Hence, the actual index UUID
     * is not useful in the context of snapshot/restore for tying a snapshotted index to the
     * index it was snapshotted from, and so we are using a separate UUID here.
     */
    public String getId() {
        return id;
    }

    @Override
    public String toString() {
        return "[" + name + "/" + id + "]";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        @SuppressWarnings("unchecked") IndexId that = (IndexId) o;
        return Objects.equals(name, that.name) && Objects.equals(id, that.id);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, id);
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeString(id);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(NAME, name);
        builder.field(ID, id);
        builder.endObject();
        return builder;
    }
}
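
Note: a quick sketch of how an `IndexId` renders through x-content, based on the `toXContent` above; the id value is made up, and `XContentFactory`/`builder.string()` are assumed helpers from that era's API:

    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.repositories.IndexId;

    class IndexIdDemo {
        static void demo() throws Exception {
            IndexId indexId = new IndexId("twitter", "5kZvvZaBTUOhqNbd_PDkpw"); // id illustrative
            XContentBuilder builder = XContentFactory.jsonBuilder();
            indexId.toXContent(builder, ToXContent.EMPTY_PARAMS);
            System.out.println(builder.string()); // {"name":"twitter","id":"5kZvvZaBTUOhqNbd_PDkpw"}
        }
    }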
Repository.java

@@ -47,7 +47,7 @@ import java.util.List;
 * <ul>
 * <li>Master calls {@link #initializeSnapshot(SnapshotId, List, org.elasticsearch.cluster.metadata.MetaData)}
 * with list of indices that will be included into the snapshot</li>
- * <li>Data nodes call {@link Repository#snapshotShard(IndexShard, SnapshotId, IndexCommit, IndexShardSnapshotStatus)}
+ * <li>Data nodes call {@link Repository#snapshotShard(IndexShard, SnapshotId, IndexId, IndexCommit, IndexShardSnapshotStatus)}
 * for each shard</li>
 * <li>When all shard calls return master calls {@link #finalizeSnapshot} with possible list of failures</li>
 * </ul>
@@ -88,15 +88,14 @@ public interface Repository extends LifecycleComponent {
     * @param indices list of indices
     * @return information about snapshot
     */
-    MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<String> indices) throws IOException;
+    MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<IndexId> indices) throws IOException;
 
    /**
-    * Returns the list of snapshots currently stored in the repository that match the given predicate on the snapshot name.
-    * To get all snapshots, the predicate filter should return true regardless of the input.
-    *
-    * @return snapshot list
+    * Returns a {@link RepositoryData} to describe the data in the repository, including the snapshots
+    * and the indices across all snapshots found in the repository. Throws a {@link RepositoryException}
+    * if there was an error in reading the data.
     */
-    List<SnapshotId> getSnapshots();
+    RepositoryData getRepositoryData();
 
    /**
    * Starts snapshotting process
@@ -105,7 +104,7 @@ public interface Repository extends LifecycleComponent {
     * @param indices list of indices to be snapshotted
     * @param metaData cluster metadata
     */
-    void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData metaData);
+    void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData metaData);
 
    /**
    * Finalizes snapshotting process
@@ -113,12 +112,14 @@ public interface Repository extends LifecycleComponent {
     * This method is called on master after all shards are snapshotted.
     *
     * @param snapshotId snapshot id
+    * @param indices list of indices in the snapshot
     * @param startTime start time of the snapshot
     * @param failure global failure reason or null
     * @param totalShards total number of shards
     * @param shardFailures list of shard failures
     * @return snapshot description
     */
-    SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<String> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures);
+    SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<IndexId> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures);
 
    /**
    * Deletes snapshot
@@ -181,10 +182,11 @@ public interface Repository extends LifecycleComponent {
     *
     * @param shard               shard to be snapshotted
     * @param snapshotId          snapshot id
+    * @param indexId             id for the index being snapshotted
     * @param snapshotIndexCommit commit point
     * @param snapshotStatus      snapshot status
     */
-    void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus);
+    void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus);
 
    /**
    * Restores snapshot of the shard.
@@ -194,20 +196,22 @@ public interface Repository extends LifecycleComponent {
     * @param shard           the shard to restore the index into
     * @param snapshotId      snapshot id
     * @param version         version of elasticsearch that created this snapshot
+    * @param indexId         id of the index in the repository from which the restore is occurring
     * @param snapshotShardId shard id (in the snapshot)
     * @param recoveryState   recovery state
     */
-    void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState);
+    void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState);
 
    /**
    * Retrieve shard snapshot status for the stored snapshot
    *
    * @param snapshotId snapshot id
    * @param version    version of elasticsearch that created this snapshot
+    * @param indexId    the snapshotted index id for the shard to get status for
    * @param shardId    shard id
    * @return snapshot status
    */
-    IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId);
+    IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId);
 
 
 }
@@ -0,0 +1,311 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.snapshots.SnapshotId;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * A class that represents the data in a repository, as captured in the
 * repository's index blob.
 */
public final class RepositoryData implements ToXContent {

    public static final RepositoryData EMPTY = new RepositoryData(Collections.emptyList(), Collections.emptyMap());

    /**
     * The ids of the snapshots in the repository.
     */
    private final List<SnapshotId> snapshotIds;
    /**
     * The indices found in the repository across all snapshots, as a name to {@link IndexId} mapping
     */
    private final Map<String, IndexId> indices;
    /**
     * The snapshots that each index belongs to.
     */
    private final Map<IndexId, Set<SnapshotId>> indexSnapshots;

    public RepositoryData(List<SnapshotId> snapshotIds, Map<IndexId, Set<SnapshotId>> indexSnapshots) {
        this.snapshotIds = Collections.unmodifiableList(snapshotIds);
        this.indices = Collections.unmodifiableMap(indexSnapshots.keySet()
            .stream()
            .collect(Collectors.toMap(IndexId::getName, Function.identity())));
        this.indexSnapshots = Collections.unmodifiableMap(indexSnapshots);
    }

    protected RepositoryData copy() {
        return new RepositoryData(snapshotIds, indexSnapshots);
    }

    /**
     * Returns an unmodifiable list of the snapshot ids.
     */
    public List<SnapshotId> getSnapshotIds() {
        return snapshotIds;
    }

    /**
     * Returns an unmodifiable map of the index names to {@link IndexId} in the repository.
     */
    public Map<String, IndexId> getIndices() {
        return indices;
    }

    /**
     * Add a snapshot and its indices to the repository; returns a new instance. If the snapshot
     * already exists in the repository data, this method throws an IllegalArgumentException.
     */
    public RepositoryData addSnapshot(final SnapshotId snapshotId, final List<IndexId> snapshottedIndices) {
        if (snapshotIds.contains(snapshotId)) {
            throw new IllegalArgumentException("[" + snapshotId + "] already exists in the repository data");
        }
        List<SnapshotId> snapshots = new ArrayList<>(snapshotIds);
        snapshots.add(snapshotId);
        Map<IndexId, Set<SnapshotId>> allIndexSnapshots = new HashMap<>(indexSnapshots);
        for (final IndexId indexId : snapshottedIndices) {
            if (allIndexSnapshots.containsKey(indexId)) {
                Set<SnapshotId> ids = allIndexSnapshots.get(indexId);
                if (ids == null) {
                    ids = new LinkedHashSet<>();
                    allIndexSnapshots.put(indexId, ids);
                }
                ids.add(snapshotId);
            } else {
                Set<SnapshotId> ids = new LinkedHashSet<>();
                ids.add(snapshotId);
                allIndexSnapshots.put(indexId, ids);
            }
        }
        return new RepositoryData(snapshots, allIndexSnapshots);
    }

    /**
     * Initializes the indices in the repository metadata; returns a new instance.
     */
    public RepositoryData initIndices(final Map<IndexId, Set<SnapshotId>> indexSnapshots) {
        return new RepositoryData(snapshotIds, indexSnapshots);
    }

    /**
     * Remove a snapshot and remove any indices that no longer exist in the repository due to the deletion of the snapshot.
     */
    public RepositoryData removeSnapshot(final SnapshotId snapshotId) {
        List<SnapshotId> newSnapshotIds = snapshotIds
            .stream()
            .filter(id -> snapshotId.equals(id) == false)
            .collect(Collectors.toList());
        Map<IndexId, Set<SnapshotId>> indexSnapshots = new HashMap<>();
        for (final IndexId indexId : indices.values()) {
            Set<SnapshotId> set;
            Set<SnapshotId> snapshotIds = this.indexSnapshots.get(indexId);
            assert snapshotIds != null;
            if (snapshotIds.contains(snapshotId)) {
                if (snapshotIds.size() == 1) {
                    // removing the snapshot will mean no more snapshots have this index, so just skip over it
                    continue;
                }
                set = new LinkedHashSet<>(snapshotIds);
                set.remove(snapshotId);
            } else {
                set = snapshotIds;
            }
            indexSnapshots.put(indexId, set);
        }

        return new RepositoryData(newSnapshotIds, indexSnapshots);
    }

    /**
     * Returns an immutable collection of the snapshot ids for the snapshots that contain the given index.
     */
    public Set<SnapshotId> getSnapshots(final IndexId indexId) {
        Set<SnapshotId> snapshotIds = indexSnapshots.get(indexId);
        if (snapshotIds == null) {
            throw new IllegalArgumentException("unknown snapshot index " + indexId + "");
        }
        return snapshotIds;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        @SuppressWarnings("unchecked") RepositoryData that = (RepositoryData) obj;
        return snapshotIds.equals(that.snapshotIds)
                   && indices.equals(that.indices)
                   && indexSnapshots.equals(that.indexSnapshots);
    }

    @Override
    public int hashCode() {
        return Objects.hash(snapshotIds, indices, indexSnapshots);
    }

    /**
     * Resolve the index name to the index id specific to the repository,
     * throwing an exception if the index could not be resolved.
     */
    public IndexId resolveIndexId(final String indexName) {
        if (indices.containsKey(indexName)) {
            return indices.get(indexName);
        } else {
            // on repositories created before 5.0, there was no indices information in the index
            // blob, so if the repository hasn't been updated with new snapshots, no new index blob
            // would have been written, so we only have old snapshots without the index information.
            // in this case, the index id is just the index name
            return new IndexId(indexName, indexName);
        }
    }

    /**
     * Resolve the given index names to index ids.
     */
    public List<IndexId> resolveIndices(final List<String> indices) {
        List<IndexId> resolvedIndices = new ArrayList<>(indices.size());
        for (final String indexName : indices) {
            resolvedIndices.add(resolveIndexId(indexName));
        }
        return resolvedIndices;
    }

    /**
     * Resolve the given index names to index ids, creating new index ids for
     * new indices in the repository.
     */
    public List<IndexId> resolveNewIndices(final List<String> indicesToResolve) {
        List<IndexId> snapshotIndices = new ArrayList<>();
        for (String index : indicesToResolve) {
            final IndexId indexId;
            if (indices.containsKey(index)) {
                indexId = indices.get(index);
            } else {
                indexId = new IndexId(index, UUIDs.randomBase64UUID());
            }
            snapshotIndices.add(indexId);
        }
        return snapshotIndices;
    }

    private static final String SNAPSHOTS = "snapshots";
    private static final String INDICES = "indices";
    private static final String INDEX_ID = "id";

    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        builder.startObject();
        // write the snapshots list
        builder.startArray(SNAPSHOTS);
        for (final SnapshotId snapshot : getSnapshotIds()) {
            snapshot.toXContent(builder, params);
        }
        builder.endArray();
        // write the indices map
        builder.startObject(INDICES);
        for (final IndexId indexId : getIndices().values()) {
            builder.startObject(indexId.getName());
            builder.field(INDEX_ID, indexId.getId());
            builder.startArray(SNAPSHOTS);
            Set<SnapshotId> snapshotIds = indexSnapshots.get(indexId);
            assert snapshotIds != null;
            for (final SnapshotId snapshotId : snapshotIds) {
                snapshotId.toXContent(builder, params);
            }
            builder.endArray();
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }

    public static RepositoryData fromXContent(final XContentParser parser) throws IOException {
        List<SnapshotId> snapshots = new ArrayList<>();
        Map<IndexId, Set<SnapshotId>> indexSnapshots = new HashMap<>();
        if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
            while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
                String currentFieldName = parser.currentName();
                if (SNAPSHOTS.equals(currentFieldName)) {
                    if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
                        while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                            snapshots.add(SnapshotId.fromXContent(parser));
                        }
                    } else {
                        throw new ElasticsearchParseException("expected array for [" + currentFieldName + "]");
                    }
                } else if (INDICES.equals(currentFieldName)) {
                    if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                        throw new ElasticsearchParseException("start object expected [indices]");
                    }
                    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                        String indexName = parser.currentName();
                        String indexId = null;
                        Set<SnapshotId> snapshotIds = new LinkedHashSet<>();
                        if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                            throw new ElasticsearchParseException("start object expected index[" + indexName + "]");
                        }
                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                            String indexMetaFieldName = parser.currentName();
                            parser.nextToken();
                            if (INDEX_ID.equals(indexMetaFieldName)) {
                                indexId = parser.text();
                            } else if (SNAPSHOTS.equals(indexMetaFieldName)) {
                                if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
                                    throw new ElasticsearchParseException("start array expected [snapshots]");
                                }
                                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                    snapshotIds.add(SnapshotId.fromXContent(parser));
                                }
                            }
                        }
                        assert indexId != null;
                        indexSnapshots.put(new IndexId(indexName, indexId), snapshotIds);
                    }
                } else {
                    throw new ElasticsearchParseException("unknown field name [" + currentFieldName + "]");
                }
            }
        } else {
            throw new ElasticsearchParseException("start object expected");
        }
        return new RepositoryData(snapshots, indexSnapshots);
    }

}
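Note: a quick sketch of how the immutable API above composes (illustration only, not part of the change; the snapshot and index names are made up):

    // Every mutation returns a new RepositoryData instance; EMPTY is never modified.
    SnapshotId snap1 = new SnapshotId("snap-1", UUIDs.randomBase64UUID());
    IndexId foo = new IndexId("foo", UUIDs.randomBase64UUID());
    RepositoryData repoData = RepositoryData.EMPTY.addSnapshot(snap1, Collections.singletonList(foo));

    assert repoData.getSnapshots(foo).contains(snap1);   // index -> snapshots mapping
    assert repoData.resolveIndexId("foo").equals(foo);   // name -> IndexId lookup

    // Removing the only snapshot that contains "foo" also drops the index entry.
    RepositoryData emptied = repoData.removeSnapshot(snap1);
    assert emptied.getSnapshotIds().isEmpty() && emptied.getIndices().isEmpty();

When serialized by toXContent, this produces a JSON object with a "snapshots" array plus an "indices" map keyed by index name, each index entry carrying its "id" and its own "snapshots" array.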
@@ -45,6 +45,8 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotException;

@@ -58,6 +60,8 @@ import org.elasticsearch.index.snapshots.blobstore.SnapshotFiles;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;

@@ -103,6 +107,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static java.util.Collections.emptyMap;

@@ -119,14 +124,14 @@ import static java.util.Collections.unmodifiableMap;
 * {@code
 *   STORE_ROOT
 *   |- index-N           - list of all snapshot names as JSON array, N is the generation of the file
 *   |- index-latest      - contains the numeric value of the latest generation of the index file (i.e. N from above)
 *   |- snapshot-20131010 - JSON serialized Snapshot for snapshot "20131010"
 *   |- index.latest      - contains the numeric value of the latest generation of the index file (i.e. N from above)
 *   |- snap-20131010     - JSON serialized Snapshot for snapshot "20131010"
 *   |- meta-20131010.dat - JSON serialized MetaData for snapshot "20131010" (includes only global metadata)
 *   |- snapshot-20131011 - JSON serialized Snapshot for snapshot "20131011"
 *   |- snap-20131011     - JSON serialized Snapshot for snapshot "20131011"
 *   |- meta-20131011.dat - JSON serialized MetaData for snapshot "20131011"
 *   .....
 *   |- indices/ - data for all indices
 *      |- foo/ - data for index "foo"
 *      |- Ac1342-B_x/ - data for index "foo" which was assigned the unique id of Ac1342-B_x in the repository
 *      |  |- meta-20131010.dat - JSON Serialized IndexMetaData for index "foo"
 *      |  |- 0/ - data for shard "0" of index "foo"
 *      |  |  |- __1 \

@@ -146,7 +151,7 @@ import static java.util.Collections.unmodifiableMap;
 *      |  |-2/
 *      |  ......
 *      |
 *      |- bar/ - data for index bar
 *      |- 1xB0D8_B3y/ - data for index "bar" which was assigned the unique id of 1xB0D8_B3y in the repository
 *          ......
 * }
 * </pre>
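Note: under the renamed layout above, index data lives under the repository-assigned id instead of the index name, so re-creating an index cannot collide with blobs from its earlier incarnation. A sketch of the path composition (mirroring the Context constructor later in this file; basePath() and blobStore() are the repository's own accessors, and repositoryData is assumed in scope):

    // Shard 0 of index "foo" now resolves to indices/<index-uuid>/0/.
    IndexId indexId = repositoryData.resolveIndexId("foo"); // e.g. "foo" -> Ac1342-B_x
    BlobPath shardPath = basePath().add("indices").add(indexId.getId()).add(Integer.toString(0));
    BlobContainer shardContainer = blobStore().blobContainer(shardPath);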
@@ -163,13 +168,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp

    private static final String SNAPSHOT_PREFIX = "snap-";

    protected static final String SNAPSHOT_CODEC = "snapshot";
    private static final String SNAPSHOT_CODEC = "snapshot";

    static final String SNAPSHOTS_FILE = "index"; // package private for unit testing

    private static final String SNAPSHOTS_FILE_PREFIX = "index-";
    private static final String INDEX_FILE_PREFIX = "index-";

    private static final String SNAPSHOTS_INDEX_LATEST_BLOB = "index.latest";
    private static final String INDEX_LATEST_BLOB = "index.latest";

    private static final String TESTS_FILE = "tests-";

@@ -305,7 +310,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
    }

    @Override
    public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData clusterMetadata) {
    public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetaData) {
        if (isReadOnly()) {
            throw new RepositoryException(metadata.name(), "cannot create snapshot in a readonly repository");
        }

@@ -315,28 +320,69 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        if (getSnapshots().stream().anyMatch(s -> s.getName().equals(snapshotName))) {
            throw new SnapshotCreationException(metadata.name(), snapshotId, "snapshot with the same name already exists");
        }
        if (snapshotFormat.exists(snapshotsBlobContainer, blobId(snapshotId)) ||
        if (snapshotFormat.exists(snapshotsBlobContainer, snapshotId.getUUID()) ||
            snapshotLegacyFormat.exists(snapshotsBlobContainer, snapshotName)) {
            throw new SnapshotCreationException(metadata.name(), snapshotId, "snapshot with such name already exists");
        }

        // Write Global MetaData
        globalMetaDataFormat.write(clusterMetadata, snapshotsBlobContainer, snapshotName);
        for (String index : indices) {
            final IndexMetaData indexMetaData = clusterMetadata.index(index);
            final BlobPath indexPath = basePath().add("indices").add(index);
        globalMetaDataFormat.write(clusterMetaData, snapshotsBlobContainer, snapshotId.getUUID());

        // write the index metadata for each index in the snapshot
        for (IndexId index : indices) {
            final IndexMetaData indexMetaData = clusterMetaData.index(index.getName());
            final BlobPath indexPath = basePath().add("indices").add(index.getId());
            final BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
            indexMetaDataFormat.write(indexMetaData, indexMetaDataBlobContainer, snapshotName);
            indexMetaDataFormat.write(indexMetaData, indexMetaDataBlobContainer, snapshotId.getUUID());
        }
        } catch (IOException ex) {
            throw new SnapshotCreationException(metadata.name(), snapshotId, ex);
        }
    }

    // Older repository index files (index-N) only contain snapshot info, not indices info,
    // so if the repository data is of the older format, populate it with the indices entries
    // so we know which indices of snapshots have blob ids in the older format.
    private RepositoryData upgradeRepositoryData(final RepositoryData repositoryData) throws IOException {
        final Map<IndexId, Set<SnapshotId>> indexToSnapshots = new HashMap<>();
        for (final SnapshotId snapshotId : repositoryData.getSnapshotIds()) {
            final SnapshotInfo snapshotInfo;
            try {
                snapshotInfo = getSnapshotInfo(snapshotId);
            } catch (SnapshotException e) {
                logger.warn("[{}] repository is on a pre-5.0 format with an index file that contains snapshot [{}] but " +
                            "the corresponding snap-{}.dat file cannot be read. The snapshot will no longer be included in " +
                            "the repository but its data directories will remain.", e, getMetadata().name(),
                            snapshotId, snapshotId.getUUID());
                continue;
            }
            for (final String indexName : snapshotInfo.indices()) {
                final IndexId indexId = new IndexId(indexName, indexName);
                if (indexToSnapshots.containsKey(indexId)) {
                    indexToSnapshots.get(indexId).add(snapshotId);
                } else {
                    indexToSnapshots.put(indexId, Sets.newHashSet(snapshotId));
                }
            }
        }
        try {
            final RepositoryData updatedRepoData = repositoryData.initIndices(indexToSnapshots);
            if (isReadOnly() == false) {
                // write the new index gen file with the indices included
                writeIndexGen(updatedRepoData);
            }
            return updatedRepoData;
        } catch (IOException e) {
            throw new RepositoryException(metadata.name(), "failed to update the repository index blob with indices data on startup", e);
        }
    }

    @Override
    public void deleteSnapshot(SnapshotId snapshotId) {
        if (isReadOnly()) {
            throw new RepositoryException(metadata.name(), "cannot delete snapshot from a readonly repository");
        }
        final RepositoryData repositoryData = getRepositoryData();
        List<String> indices = Collections.emptyList();
        SnapshotInfo snapshot = null;
        try {
@@ -350,36 +396,29 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        MetaData metaData = null;
        try {
            if (snapshot != null) {
                metaData = readSnapshotMetaData(snapshotId, snapshot.version(), indices, true);
                metaData = readSnapshotMetaData(snapshotId, snapshot.version(), repositoryData.resolveIndices(indices), true);
            } else {
                metaData = readSnapshotMetaData(snapshotId, null, indices, true);
                metaData = readSnapshotMetaData(snapshotId, null, repositoryData.resolveIndices(indices), true);
            }
        } catch (IOException | SnapshotException ex) {
            logger.warn("cannot read metadata for snapshot [{}]", ex, snapshotId);
        }
        try {
            final String snapshotName = snapshotId.getName();
            // Delete snapshot file first so we wouldn't end up with partially deleted snapshot that looks OK
            if (snapshot != null) {
                snapshotFormat(snapshot.version()).delete(snapshotsBlobContainer, blobId(snapshotId));
                globalMetaDataFormat(snapshot.version()).delete(snapshotsBlobContainer, snapshotName);
            } else {
                // We don't know which version was the snapshot created with - try deleting both current and legacy formats
                snapshotFormat.delete(snapshotsBlobContainer, blobId(snapshotId));
                snapshotLegacyFormat.delete(snapshotsBlobContainer, snapshotName);
                globalMetaDataLegacyFormat.delete(snapshotsBlobContainer, snapshotName);
                globalMetaDataFormat.delete(snapshotsBlobContainer, snapshotName);
            }
            // Delete snapshot from the snapshot list
            List<SnapshotId> snapshotIds = getSnapshots().stream().filter(id -> snapshotId.equals(id) == false).collect(Collectors.toList());
            writeSnapshotsToIndexGen(snapshotIds);
            // Delete snapshot from the index file, since it is the maintainer of truth of active snapshots
            writeIndexGen(repositoryData.removeSnapshot(snapshotId));

            // delete the snapshot file
            safeSnapshotBlobDelete(snapshot, snapshotId.getUUID());
            // delete the global metadata file
            safeGlobalMetaDataBlobDelete(snapshot, snapshotId.getUUID());

            // Now delete all indices
            for (String index : indices) {
                BlobPath indexPath = basePath().add("indices").add(index);
                final IndexId indexId = repositoryData.resolveIndexId(index);
                BlobPath indexPath = basePath().add("indices").add(indexId.getId());
                BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
                try {
                    indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getName());
                    indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getUUID());
                } catch (IOException ex) {
                    logger.warn("[{}] failed to delete metadata for index [{}]", ex, snapshotId, index);
                }

@@ -388,7 +427,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                if (indexMetaData != null) {
                    for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) {
                        try {
                            delete(snapshotId, snapshot.version(), new ShardId(indexMetaData.getIndex(), shardId));
                            delete(snapshotId, snapshot.version(), indexId, new ShardId(indexMetaData.getIndex(), shardId));
                        } catch (SnapshotException ex) {
                            logger.warn("[{}] failed to delete shard data for shard [{}][{}]", ex, snapshotId, index, shardId);
                        }

@@ -401,28 +440,77 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }

    private void safeSnapshotBlobDelete(final SnapshotInfo snapshotInfo, final String blobId) {
        if (snapshotInfo != null) {
            // we know the version the snapshot was created with
            try {
                snapshotFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
            } catch (IOException e) {
                logger.warn("[{}] Unable to delete snapshot file [{}]", e, snapshotInfo.snapshotId(), blobId);
            }
        } else {
            // we don't know the version, first try the current format, then the legacy format
            try {
                snapshotFormat.delete(snapshotsBlobContainer, blobId);
            } catch (IOException e) {
                // now try legacy format
                try {
                    snapshotLegacyFormat.delete(snapshotsBlobContainer, blobId);
                } catch (IOException e2) {
                    // neither snapshot file could be deleted, log the error
                    logger.warn("Unable to delete snapshot file [{}]", e, blobId);
                }
            }
        }
    }

    private void safeGlobalMetaDataBlobDelete(final SnapshotInfo snapshotInfo, final String blobId) {
        if (snapshotInfo != null) {
            // we know the version the snapshot was created with
            try {
                globalMetaDataFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
            } catch (IOException e) {
                logger.warn("[{}] Unable to delete global metadata file [{}]", e, snapshotInfo.snapshotId(), blobId);
            }
        } else {
            // we don't know the version, first try the current format, then the legacy format
            try {
                globalMetaDataFormat.delete(snapshotsBlobContainer, blobId);
            } catch (IOException e) {
                // now try legacy format
                try {
                    globalMetaDataLegacyFormat.delete(snapshotsBlobContainer, blobId);
                } catch (IOException e2) {
                    // neither global metadata file could be deleted, log the error
                    logger.warn("Unable to delete global metadata file [{}]", e, blobId);
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SnapshotInfo finalizeSnapshot(final SnapshotId snapshotId,
                                         final List<String> indices,
                                         final List<IndexId> indices,
                                         final long startTime,
                                         final String failure,
                                         final int totalShards,
                                         final List<SnapshotShardFailure> shardFailures) {
        try {
            SnapshotInfo blobStoreSnapshot = new SnapshotInfo(snapshotId,
                indices,
                indices.stream().map(IndexId::getName).collect(Collectors.toList()),
                startTime,
                failure,
                System.currentTimeMillis(),
                totalShards,
                shardFailures);
            snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, blobId(snapshotId));
            List<SnapshotId> snapshotIds = getSnapshots();
            snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getUUID());
            final RepositoryData repositoryData = getRepositoryData();
            List<SnapshotId> snapshotIds = repositoryData.getSnapshotIds();
            if (!snapshotIds.contains(snapshotId)) {
                snapshotIds = new ArrayList<>(snapshotIds);
                snapshotIds.add(snapshotId);
                snapshotIds = Collections.unmodifiableList(snapshotIds);
                writeSnapshotsToIndexGen(snapshotIds);
                writeIndexGen(repositoryData.addSnapshot(snapshotId, indices));
            }
            return blobStoreSnapshot;
        } catch (IOException ex) {
@@ -430,27 +518,19 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }

    @Override
    public List<SnapshotId> getSnapshots() {
        try {
            return Collections.unmodifiableList(readSnapshotsFromIndex());
        } catch (NoSuchFileException | FileNotFoundException e) {
            // its a fresh repository, no index file exists, so return an empty list
            return Collections.emptyList();
        } catch (IOException ioe) {
            throw new RepositoryException(metadata.name(), "failed to list snapshots in repository", ioe);
        }
        return getRepositoryData().getSnapshotIds();
    }

    @Override
    public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<String> indices) throws IOException {
    public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<IndexId> indices) throws IOException {
        return readSnapshotMetaData(snapshot.snapshotId(), snapshot.version(), indices, false);
    }

    @Override
    public SnapshotInfo getSnapshotInfo(final SnapshotId snapshotId) {
        try {
            return snapshotFormat.read(snapshotsBlobContainer, blobId(snapshotId));
            return snapshotFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
        } catch (FileNotFoundException | NoSuchFileException ex) {
            // File is missing - let's try legacy format instead
            try {

@@ -465,13 +545,13 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }

    private MetaData readSnapshotMetaData(SnapshotId snapshotId, Version snapshotVersion, List<String> indices, boolean ignoreIndexErrors) throws IOException {
    private MetaData readSnapshotMetaData(SnapshotId snapshotId, Version snapshotVersion, List<IndexId> indices, boolean ignoreIndexErrors) throws IOException {
        MetaData metaData;
        if (snapshotVersion == null) {
            // When we delete corrupted snapshots we might not know which version we are dealing with
            // We can try detecting the version based on the metadata file format
            assert ignoreIndexErrors;
            if (globalMetaDataFormat.exists(snapshotsBlobContainer, snapshotId.getName())) {
            if (globalMetaDataFormat.exists(snapshotsBlobContainer, snapshotId.getUUID())) {
                snapshotVersion = Version.CURRENT;
            } else if (globalMetaDataLegacyFormat.exists(snapshotsBlobContainer, snapshotId.getName())) {
                throw new SnapshotException(metadata.name(), snapshotId, "snapshot is too old");

@@ -480,21 +560,21 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            }
        }
        try {
            metaData = globalMetaDataFormat(snapshotVersion).read(snapshotsBlobContainer, snapshotId.getName());
            metaData = globalMetaDataFormat(snapshotVersion).read(snapshotsBlobContainer, snapshotId.getUUID());
        } catch (FileNotFoundException | NoSuchFileException ex) {
            throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
        } catch (IOException ex) {
            throw new SnapshotException(metadata.name(), snapshotId, "failed to get snapshots", ex);
        }
        MetaData.Builder metaDataBuilder = MetaData.builder(metaData);
        for (String index : indices) {
            BlobPath indexPath = basePath().add("indices").add(index);
        for (IndexId index : indices) {
            BlobPath indexPath = basePath().add("indices").add(index.getId());
            BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
            try {
                metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, snapshotId.getName()), false);
                metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, snapshotId.getUUID()), false);
            } catch (ElasticsearchParseException | IOException ex) {
                if (ignoreIndexErrors) {
                    logger.warn("[{}] [{}] failed to read metadata for index", ex, snapshotId, index);
                    logger.warn("[{}] [{}] failed to read metadata for index", ex, snapshotId, index.getName());
                } else {
                    throw ex;
                }

@@ -562,10 +642,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }

    private static final String SNAPSHOTS = "snapshots";
    private static final String NAME = "name";
    private static final String UUID = "uuid";

    @Override
    public long getSnapshotThrottleTimeInNanos() {
        return snapshotRateLimitingTimeInNanos.count();

@@ -609,6 +685,43 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }

    @Override
    public RepositoryData getRepositoryData() {
        try {
            final long indexGen = latestIndexBlobId();
            final String snapshotsIndexBlobName;
            final boolean legacyFormat;
            if (indexGen == -1) {
                // index-N file doesn't exist, either its a fresh repository, or its in the
                // old format, so look for the older index file before returning an empty list
                snapshotsIndexBlobName = SNAPSHOTS_FILE;
                legacyFormat = true;
            } else {
                snapshotsIndexBlobName = INDEX_FILE_PREFIX + Long.toString(indexGen);
                legacyFormat = false;
            }

            RepositoryData repositoryData;
            try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) {
                BytesStreamOutput out = new BytesStreamOutput();
                Streams.copy(blob, out);
                try (XContentParser parser = XContentHelper.createParser(out.bytes())) {
                    repositoryData = RepositoryData.fromXContent(parser);
                }
            }
            if (legacyFormat) {
                // pre 5.0 repository data needs to be updated to include the indices
                repositoryData = upgradeRepositoryData(repositoryData);
            }
            return repositoryData;
        } catch (NoSuchFileException nsfe) {
            // repository doesn't have an index blob, its a new blank repo
            return RepositoryData.EMPTY;
        } catch (IOException ioe) {
            throw new RepositoryException(metadata.name(), "could not read repository data from index blob", ioe);
        }
    }

    public static String testBlobPrefix(String seed) {
        return TESTS_FILE + seed;
    }
@@ -623,35 +736,30 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        return snapshotsBlobContainer;
    }

    protected void writeSnapshotsToIndexGen(final List<SnapshotId> snapshots) throws IOException {
    protected void writeIndexGen(final RepositoryData repositoryData) throws IOException {
        assert isReadOnly() == false; // can not write to a read only repository
        final BytesReference snapshotsBytes;
        try (BytesStreamOutput bStream = new BytesStreamOutput()) {
            try (StreamOutput stream = new OutputStreamStreamOutput(bStream)) {
                XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream);
                builder.startObject();
                builder.startArray(SNAPSHOTS);
                for (SnapshotId snapshot : snapshots) {
                    builder.startObject();
                    builder.field(NAME, snapshot.getName());
                    builder.field(UUID, snapshot.getUUID());
                    builder.endObject();
                }
                builder.endArray();
                builder.endObject();
                repositoryData.toXContent(builder, ToXContent.EMPTY_PARAMS);
                builder.close();
            }
            snapshotsBytes = bStream.bytes();
        }
        final long gen = latestIndexBlobId() + 1;
        // write the index file
        writeAtomic(SNAPSHOTS_FILE_PREFIX + Long.toString(gen), snapshotsBytes);
        writeAtomic(INDEX_FILE_PREFIX + Long.toString(gen), snapshotsBytes);
        // delete the N-2 index file if it exists, keep the previous one around as a backup
        if (isReadOnly() == false && gen - 2 >= 0) {
            final String oldSnapshotIndexFile = SNAPSHOTS_FILE_PREFIX + Long.toString(gen - 2);
            final String oldSnapshotIndexFile = INDEX_FILE_PREFIX + Long.toString(gen - 2);
            if (snapshotsBlobContainer.blobExists(oldSnapshotIndexFile)) {
                snapshotsBlobContainer.deleteBlob(oldSnapshotIndexFile);
            }
            // delete the old index file (non-generational) if it exists
            if (snapshotsBlobContainer.blobExists(SNAPSHOTS_FILE)) {
                snapshotsBlobContainer.deleteBlob(SNAPSHOTS_FILE);
            }
        }

        // write the current generation to the index-latest file
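Note: writeIndexGen above keeps a small sliding window of index generations: it writes index-N, leaves index-(N-1) in place as a backup, and prunes index-(N-2). A pure-bookkeeping sketch of the names involved (no I/O, illustration only):

    long gen = latestIndexBlobId() + 1;                       // next generation to write
    String newIndexBlob = "index-" + Long.toString(gen);      // written atomically
    String backupBlob   = "index-" + Long.toString(gen - 1);  // kept around as a backup
    String staleBlob    = "index-" + Long.toString(gen - 2);  // deleted if it exists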
@@ -660,72 +768,10 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            bStream.writeLong(gen);
            genBytes = bStream.bytes();
        }
        if (snapshotsBlobContainer.blobExists(SNAPSHOTS_INDEX_LATEST_BLOB)) {
            snapshotsBlobContainer.deleteBlob(SNAPSHOTS_INDEX_LATEST_BLOB);
        if (snapshotsBlobContainer.blobExists(INDEX_LATEST_BLOB)) {
            snapshotsBlobContainer.deleteBlob(INDEX_LATEST_BLOB);
        }
        writeAtomic(SNAPSHOTS_INDEX_LATEST_BLOB, genBytes);
    }

    protected List<SnapshotId> readSnapshotsFromIndex() throws IOException {
        final long indexGen = latestIndexBlobId();
        final String snapshotsIndexBlobName;
        if (indexGen == -1) {
            // index-N file doesn't exist, either its a fresh repository, or its in the
            // old format, so look for the older index file before returning an empty list
            snapshotsIndexBlobName = SNAPSHOTS_FILE;
        } else {
            snapshotsIndexBlobName = SNAPSHOTS_FILE_PREFIX + Long.toString(indexGen);
        }

        try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) {
            BytesStreamOutput out = new BytesStreamOutput();
            Streams.copy(blob, out);
            ArrayList<SnapshotId> snapshots = new ArrayList<>();
            try (XContentParser parser = XContentHelper.createParser(out.bytes())) {
                if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
                    if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
                        String currentFieldName = parser.currentName();
                        if (SNAPSHOTS.equals(currentFieldName)) {
                            if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
                                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                    // the new format from 5.0 which contains the snapshot name and uuid
                                    String name = null;
                                    String uuid = null;
                                    if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                                            currentFieldName = parser.currentName();
                                            parser.nextToken();
                                            if (NAME.equals(currentFieldName)) {
                                                name = parser.text();
                                            } else if (UUID.equals(currentFieldName)) {
                                                uuid = parser.text();
                                            }
                                        }
                                        snapshots.add(new SnapshotId(name, uuid));
                                    }
                                    // the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too
                                    else {
                                        name = parser.text();
                                        snapshots.add(new SnapshotId(name, SnapshotId.UNASSIGNED_UUID));
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return Collections.unmodifiableList(snapshots);
        }
    }

    // Package private for testing
    static String blobId(final SnapshotId snapshotId) {
        final String uuid = snapshotId.getUUID();
        if (uuid.equals(SnapshotId.UNASSIGNED_UUID)) {
            // the old snapshot blob naming
            return snapshotId.getName();
        }
        return snapshotId.getName() + "-" + uuid;
        writeAtomic(INDEX_LATEST_BLOB, genBytes);
    }

    /**
@@ -762,7 +808,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp

    // package private for testing
    long readSnapshotIndexLatestBlob() throws IOException {
        try (InputStream blob = snapshotsBlobContainer.readBlob(SNAPSHOTS_INDEX_LATEST_BLOB)) {
        try (InputStream blob = snapshotsBlobContainer.readBlob(INDEX_LATEST_BLOB)) {
            BytesStreamOutput out = new BytesStreamOutput();
            Streams.copy(blob, out);
            return Numbers.bytesToLong(out.bytes().toBytesRef());

@@ -770,7 +816,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
    }

    private long listBlobsToGetLatestIndexId() throws IOException {
        Map<String, BlobMetaData> blobs = snapshotsBlobContainer.listBlobsByPrefix(SNAPSHOTS_FILE_PREFIX);
        Map<String, BlobMetaData> blobs = snapshotsBlobContainer.listBlobsByPrefix(INDEX_FILE_PREFIX);
        long latest = -1;
        if (blobs.isEmpty()) {
            // no snapshot index blobs have been written yet

@@ -779,7 +825,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        for (final BlobMetaData blobMetaData : blobs.values()) {
            final String blobName = blobMetaData.name();
            try {
                final long curr = Long.parseLong(blobName.substring(SNAPSHOTS_FILE_PREFIX.length()));
                final long curr = Long.parseLong(blobName.substring(INDEX_FILE_PREFIX.length()));
                latest = Math.max(latest, curr);
            } catch (NumberFormatException nfe) {
                // the index- blob wasn't of the format index-N where N is a number,
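Note: the two helpers above supply the strategy for finding the current generation (their caller, latestIndexBlobId(), is not included in this diff): prefer the index.latest pointer, and fall back to listing index-N blobs and taking the highest parseable N. A self-contained sketch of the fallback parse (the blob names are made up):

    // Extract N from "index-N" names, skipping anything non-conforming.
    long latest = -1;
    for (String blobName : new String[] {"index-3", "index-11", "index.latest", "tests-abc"}) {
        if (blobName.startsWith("index-") == false) {
            continue; // not a generational index blob (note: "index.latest" uses a dot)
        }
        try {
            latest = Math.max(latest, Long.parseLong(blobName.substring("index-".length())));
        } catch (NumberFormatException nfe) {
            // name wasn't index-N with a numeric N; ignore it
        }
    }
    // latest == 11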
@@ -802,9 +848,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }


    @Override
    public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {
        SnapshotContext snapshotContext = new SnapshotContext(shard, snapshotId, snapshotStatus);
    public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {
        SnapshotContext snapshotContext = new SnapshotContext(shard, snapshotId, indexId, snapshotStatus);
        snapshotStatus.startTime(System.currentTimeMillis());

        try {

@@ -824,8 +872,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
    }

    @Override
    public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) {
        final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, version, snapshotShardId, recoveryState);
    public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) {
        final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, version, indexId, snapshotShardId, recoveryState);
        try {
            snapshotContext.restore();
        } catch (Exception e) {

@@ -834,8 +882,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
    }

    @Override
    public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) {
        Context context = new Context(snapshotId, version, shardId);
    public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) {
        Context context = new Context(snapshotId, version, indexId, shardId);
        BlobStoreIndexShardSnapshot snapshot = context.loadSnapshot();
        IndexShardSnapshotStatus status = new IndexShardSnapshotStatus();
        status.updateStage(IndexShardSnapshotStatus.Stage.DONE);

@@ -869,8 +917,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     * @param snapshotId snapshot id
     * @param shardId    shard id
     */
    public void delete(SnapshotId snapshotId, Version version, ShardId shardId) {
        Context context = new Context(snapshotId, version, shardId, shardId);
    private void delete(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) {
        Context context = new Context(snapshotId, version, indexId, shardId, shardId);
        context.delete();
    }

@@ -903,15 +951,15 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp

        protected final Version version;

        public Context(SnapshotId snapshotId, Version version, ShardId shardId) {
            this(snapshotId, version, shardId, shardId);
        public Context(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) {
            this(snapshotId, version, indexId, shardId, shardId);
        }

        public Context(SnapshotId snapshotId, Version version, ShardId shardId, ShardId snapshotShardId) {
        public Context(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId, ShardId snapshotShardId) {
            this.snapshotId = snapshotId;
            this.version = version;
            this.shardId = shardId;
            blobContainer = blobStore().blobContainer(basePath().add("indices").add(snapshotShardId.getIndexName()).add(Integer.toString(snapshotShardId.getId())));
            blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId()).add(Integer.toString(snapshotShardId.getId())));
        }

        /**

@@ -930,7 +978,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            int fileListGeneration = tuple.v2();

            try {
                indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getName());
                indexShardSnapshotFormat(version).delete(blobContainer, snapshotId.getUUID());
            } catch (IOException e) {
                logger.debug("[{}] [{}] failed to delete shard snapshot file", shardId, snapshotId);
            }

@@ -951,7 +999,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         */
        public BlobStoreIndexShardSnapshot loadSnapshot() {
            try {
                return indexShardSnapshotFormat(version).read(blobContainer, snapshotId.getName());
                return indexShardSnapshotFormat(version).read(blobContainer, snapshotId.getUUID());
            } catch (IOException ex) {
                throw new IndexShardRestoreFailedException(shardId, "failed to read shard snapshot file", ex);
            }

@@ -1080,7 +1128,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            try {
                BlobStoreIndexShardSnapshot snapshot = null;
                if (name.startsWith(SNAPSHOT_PREFIX)) {
                    snapshot = indexShardSnapshotFormat.readBlob(blobContainer, name);
                    snapshot = indexShardSnapshotFormat.readBlob(blobContainer, snapshotId.getUUID());
                } else if (name.startsWith(LEGACY_SNAPSHOT_PREFIX)) {
                    snapshot = indexShardSnapshotLegacyFormat.readBlob(blobContainer, name);
                }

@@ -1109,10 +1157,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         *
         * @param shard          shard to be snapshotted
         * @param snapshotId     snapshot id
         * @param indexId        the id of the index being snapshotted
         * @param snapshotStatus snapshot status to report progress
         */
        public SnapshotContext(IndexShard shard, SnapshotId snapshotId, IndexShardSnapshotStatus snapshotStatus) {
            super(snapshotId, Version.CURRENT, shard.shardId());
        public SnapshotContext(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexShardSnapshotStatus snapshotStatus) {
            super(snapshotId, Version.CURRENT, indexId, shard.shardId());
            this.snapshotStatus = snapshotStatus;
            this.store = shard.store();
        }

@@ -1220,7 +1269,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
            //TODO: The time stored in snapshot doesn't include cleanup time.
            logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
            try {
                indexShardSnapshotFormat.write(snapshot, blobContainer, snapshotId.getName());
                indexShardSnapshotFormat.write(snapshot, blobContainer, snapshotId.getUUID());
            } catch (IOException e) {
                throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e);
            }

@@ -1396,11 +1445,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         *
         * @param shard           shard to restore into
         * @param snapshotId      snapshot id
         * @param indexId         id of the index being restored
         * @param snapshotShardId shard in the snapshot that data should be restored from
         * @param recoveryState   recovery state to report progress
         */
        public RestoreContext(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) {
            super(snapshotId, version, shard.shardId(), snapshotShardId);
        public RestoreContext(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) {
            super(snapshotId, version, indexId, shard.shardId(), snapshotShardId);
            this.recoveryState = recoveryState;
            store = shard.store();
        }

@@ -1574,6 +1624,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
        }
    }
}

}

}
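Note: the net effect across this file is that snapshot-scoped blobs are keyed by the snapshot's UUID rather than its reusable name. A before/after sketch ("snap-" comes from SNAPSHOT_PREFIX; the ".dat" suffix is assumed from the blob-store format in use, so treat the exact names as illustrative):

    SnapshotId id = new SnapshotId("nightly", "rO8cb0_rQhS3p8JL"); // made-up name and UUID
    String legacyBlob  = "snap-" + id.getName() + ".dat"; // snap-nightly.dat: collides if "nightly" is reused
    String currentBlob = "snap-" + id.getUUID() + ".dat"; // snap-rO8cb0_rQhS3p8JL.dat: unique per snapshot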
@@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cache.Cache;

@@ -101,7 +100,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
    private final ScriptModes scriptModes;
    private final ScriptContextRegistry scriptContextRegistry;

    private final ParseFieldMatcher parseFieldMatcher;
    private final ScriptMetrics scriptMetrics = new ScriptMetrics();

    private ClusterState clusterState;

@@ -113,7 +111,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
        Objects.requireNonNull(scriptEngineRegistry);
        Objects.requireNonNull(scriptContextRegistry);
        Objects.requireNonNull(scriptSettings);
        this.parseFieldMatcher = new ParseFieldMatcher(settings);
        if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
            throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. \n" +
                    "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: true` and `script.stored: true` in elasticsearch.yml");

@@ -19,11 +19,9 @@

package org.elasticsearch.script;

import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;

import java.util.ArrayList;
import java.util.Collections;

@@ -31,7 +29,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ScriptSettings {
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Support for limiting the completion suggesters results to within a "context" like a geographic location or a category.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.completion.context;
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Suggests alternate queries by fancy prefix matching.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.completion;
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Support for completion suggesters with contexts built on 2.x indices.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.completion2x.context;
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Support for completion suggesters built on 2.x indices.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.completion2x;
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Support for suggesting alternate queries.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest;
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Suggests alternate queries by breaking the query into terms and suggesting terms that are frequently found together.
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.phrase;
|
|
@ -0,0 +1,23 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Suggests alternate queries by breaking the query into terms and suggesting more popular terms.
 */
package org.elasticsearch.search.suggest.term;
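// For orientation, a rough sketch of how the phrase and term suggesters described
// above are wired into a search request from Java. The builder names below
// (SuggestBuilders, SuggestBuilder#addSuggestion, text(...)) are assumed from the
// 5.x suggest API rather than taken from this diff, and the index/field values are
// made up:
SuggestBuilder suggest = new SuggestBuilder()
    .addSuggestion("spelling", SuggestBuilders.termSuggestion("body").text("elasticsaerch"))
    .addSuggestion("did_you_mean", SuggestBuilders.phraseSuggestion("body").text("elasticsaerch clustr"));
client.prepareSearch("articles").suggest(suggest).get();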
@ -63,8 +63,10 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;

@ -185,7 +187,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
try {
// Read snapshot info and metadata from the repository
Repository repository = repositoriesService.repository(request.repositoryName);
final Optional<SnapshotId> matchingSnapshotId = repository.getSnapshots().stream()
final RepositoryData repositoryData = repository.getRepositoryData();
final Optional<SnapshotId> matchingSnapshotId = repositoryData.getSnapshotIds().stream()
.filter(s -> request.snapshotName.equals(s.getName())).findFirst();
if (matchingSnapshotId.isPresent() == false) {
throw new SnapshotRestoreException(request.repositoryName, request.snapshotName, "snapshot does not exist");

@ -194,7 +197,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
final SnapshotInfo snapshotInfo = repository.getSnapshotInfo(snapshotId);
final Snapshot snapshot = new Snapshot(request.repositoryName, snapshotId);
List<String> filteredIndices = SnapshotUtils.filterIndices(snapshotInfo.indices(), request.indices(), request.indicesOptions());
MetaData metaDataIn = repository.getSnapshotMetaData(snapshotInfo, filteredIndices);
MetaData metaDataIn = repository.getSnapshotMetaData(snapshotInfo, repositoryData.resolveIndices(filteredIndices));

final MetaData metaData;
if (snapshotInfo.version().before(Version.V_2_0_0_beta1)) {
@ -22,6 +22,9 @@ package org.elasticsearch.snapshots;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

@ -29,12 +32,10 @@ import java.util.Objects;
/**
 * SnapshotId - snapshot name + snapshot UUID
 */
public final class SnapshotId implements Writeable {
public final class SnapshotId implements Writeable, ToXContent {

/**
 * This value is for older snapshots that don't have a UUID.
 */
public static final String UNASSIGNED_UUID = "_na_";
private static final String NAME = "name";
private static final String UUID = "uuid";

private final String name;
private final String uuid;

@ -115,4 +116,35 @@ public final class SnapshotId implements Writeable {
out.writeString(uuid);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME, name);
builder.field(UUID, uuid);
builder.endObject();
return builder;
}

public static SnapshotId fromXContent(XContentParser parser) throws IOException {
// the new format from 5.0 which contains the snapshot name and uuid
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
String name = null;
String uuid = null;
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
String currentFieldName = parser.currentName();
parser.nextToken();
if (NAME.equals(currentFieldName)) {
name = parser.text();
} else if (UUID.equals(currentFieldName)) {
uuid = parser.text();
}
}
return new SnapshotId(name, uuid);
} else {
// the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too
final String name = parser.text();
return new SnapshotId(name, name);
}
}

}
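// A minimal round-trip sketch for the XContent support added to SnapshotId above,
// using the same builder/parser helpers that appear in the tests later in this
// diff (JsonXContent, ToXContent.EMPTY_PARAMS); the snapshot name is illustrative:
XContentBuilder builder = JsonXContent.contentBuilder();
new SnapshotId("snap-1", UUIDs.randomBase64UUID()).toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentType.JSON.xContent().createParser(builder.bytes());
parser.nextToken(); // position the parser on START_OBJECT so fromXContent takes the 5.0 branch
SnapshotId roundTripped = SnapshotId.fromXContent(parser);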
@ -458,7 +458,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
}
if (uuid == null) {
// the old format where there wasn't a UUID
uuid = SnapshotId.UNASSIGNED_UUID;
uuid = name;
}
return new SnapshotInfo(new SnapshotId(name, uuid),
indices,
@ -46,6 +46,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.EmptyTransportResponseHandler;

@ -66,6 +67,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;
import java.util.stream.Collectors;

import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;

@ -208,8 +211,11 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
Map<Snapshot, Map<ShardId, IndexShardSnapshotStatus>> newSnapshots = new HashMap<>();
// Now go through all snapshots and update existing or create missing
final String localNodeId = clusterService.localNode().getId();
final Map<Snapshot, Map<String, IndexId>> snapshotIndices = new HashMap<>();
if (snapshotsInProgress != null) {
for (SnapshotsInProgress.Entry entry : snapshotsInProgress.entries()) {
snapshotIndices.put(entry.snapshot(),
entry.indices().stream().collect(Collectors.toMap(IndexId::getName, Function.identity())));
if (entry.state() == SnapshotsInProgress.State.STARTED) {
Map<ShardId, IndexShardSnapshotStatus> startedShards = new HashMap<>();
SnapshotShards snapshotShards = shardSnapshots.get(entry.snapshot());

@ -289,14 +295,18 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
if (newSnapshots.isEmpty() == false) {
Executor executor = threadPool.executor(ThreadPool.Names.SNAPSHOT);
for (final Map.Entry<Snapshot, Map<ShardId, IndexShardSnapshotStatus>> entry : newSnapshots.entrySet()) {
Map<String, IndexId> indicesMap = snapshotIndices.get(entry.getKey());
assert indicesMap != null;
for (final Map.Entry<ShardId, IndexShardSnapshotStatus> shardEntry : entry.getValue().entrySet()) {
final ShardId shardId = shardEntry.getKey();
try {
final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id());
final IndexId indexId = indicesMap.get(shardId.getIndexName());
assert indexId != null;
executor.execute(new AbstractRunnable() {
@Override
public void doRun() {
snapshot(indexShard, entry.getKey(), shardEntry.getValue());
snapshot(indexShard, entry.getKey(), indexId, shardEntry.getValue());
updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.SUCCESS));
}

@ -321,7 +331,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
 * @param snapshot snapshot
 * @param snapshotStatus snapshot status
 */
private void snapshot(final IndexShard indexShard, final Snapshot snapshot, final IndexShardSnapshotStatus snapshotStatus) {
private void snapshot(final IndexShard indexShard, final Snapshot snapshot, final IndexId indexId, final IndexShardSnapshotStatus snapshotStatus) {
Repository repository = snapshotsService.getRepositoriesService().repository(snapshot.getRepository());
ShardId shardId = indexShard.shardId();
if (!indexShard.routingEntry().primary()) {

@ -340,7 +350,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
// we flush first to make sure we get the latest writes snapshotted
IndexCommit snapshotIndexCommit = indexShard.snapshotIndex(true);
try {
repository.snapshotShard(indexShard, snapshot.getSnapshotId(), snapshotIndexCommit, snapshotStatus);
repository.snapshotShard(indexShard, snapshot.getSnapshotId(), indexId, snapshotIndexCommit, snapshotStatus);
if (logger.isDebugEnabled()) {
StringBuilder sb = new StringBuilder();
sb.append(" index : version [").append(snapshotStatus.indexVersion()).append("], number_of_files [").append(snapshotStatus.numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(snapshotStatus.totalSize())).append("]\n");
@ -56,8 +56,10 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.threadpool.ThreadPool;

@ -132,7 +134,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
public List<SnapshotId> snapshotIds(final String repositoryName) {
Repository repository = repositoriesService.repository(repositoryName);
assert repository != null; // should only be called once we've validated the repository exists
return repository.getSnapshots();
return repository.getRepositoryData().getSnapshotIds();
}

/**

@ -218,6 +220,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
final String snapshotName = request.snapshotName;
validate(repositoryName, snapshotName);
final SnapshotId snapshotId = new SnapshotId(snapshotName, UUIDs.randomBase64UUID()); // new UUID for the snapshot
final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData();

clusterService.submitStateUpdateTask(request.cause(), new ClusterStateUpdateTask() {

@ -232,11 +235,12 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
// Store newSnapshot here to be processed in clusterStateProcessed
List<String> indices = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(currentState, request.indicesOptions(), request.indices()));
logger.trace("[{}][{}] creating snapshot for indices [{}]", repositoryName, snapshotName, indices);
List<IndexId> snapshotIndices = repositoryData.resolveNewIndices(indices);
newSnapshot = new SnapshotsInProgress.Entry(new Snapshot(repositoryName, snapshotId),
request.includeGlobalState(),
request.partial(),
State.INIT,
indices,
snapshotIndices,
System.currentTimeMillis(),
null);
snapshots = new SnapshotsInProgress(newSnapshot);

@ -334,8 +338,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
if (!snapshot.includeGlobalState()) {
// Remove global state from the cluster state
MetaData.Builder builder = MetaData.builder();
for (String index : snapshot.indices()) {
builder.put(metaData.index(index), false);
for (IndexId index : snapshot.indices()) {
builder.put(metaData.index(index.getName()), false);
}
metaData = builder.build();
}

@ -473,7 +477,9 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
}

private SnapshotInfo inProgressSnapshot(SnapshotsInProgress.Entry entry) {
return new SnapshotInfo(entry.snapshot().getSnapshotId(), entry.indices(), entry.startTime());
return new SnapshotInfo(entry.snapshot().getSnapshotId(),
entry.indices().stream().map(IndexId::getName).collect(Collectors.toList()),
entry.startTime());
}

/**

@ -546,8 +552,10 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
final SnapshotInfo snapshotInfo) throws IOException {
Map<ShardId, IndexShardSnapshotStatus> shardStatus = new HashMap<>();
Repository repository = repositoriesService.repository(repositoryName);
MetaData metaData = repository.getSnapshotMetaData(snapshotInfo, snapshotInfo.indices());
RepositoryData repositoryData = repository.getRepositoryData();
MetaData metaData = repository.getSnapshotMetaData(snapshotInfo, repositoryData.resolveIndices(snapshotInfo.indices()));
for (String index : snapshotInfo.indices()) {
IndexId indexId = repositoryData.resolveIndexId(index);
IndexMetaData indexMetaData = metaData.indices().get(index);
if (indexMetaData != null) {
int numberOfShards = indexMetaData.getNumberOfShards();

@ -561,7 +569,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
shardStatus.put(shardId, shardSnapshotStatus);
} else {
IndexShardSnapshotStatus shardSnapshotStatus =
repository.getShardSnapshotStatus(snapshotInfo.snapshotId(), snapshotInfo.version(), shardId);
repository.getShardSnapshotStatus(snapshotInfo.snapshotId(), snapshotInfo.version(), indexId, shardId);
shardStatus.put(shardId, shardSnapshotStatus);
}
}

@ -953,7 +961,10 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
public void deleteSnapshot(final String repositoryName, final String snapshotName, final DeleteSnapshotListener listener) {
// First, look for the snapshot in the repository
final Repository repository = repositoriesService.repository(repositoryName);
Optional<SnapshotId> matchedEntry = repository.getSnapshots().stream().filter(s -> s.getName().equals(snapshotName)).findFirst();
Optional<SnapshotId> matchedEntry = repository.getRepositoryData().getSnapshotIds()
.stream()
.filter(s -> s.getName().equals(snapshotName))
.findFirst();
// if nothing found by the same name, then look in the cluster state for current in progress snapshots
if (matchedEntry.isPresent() == false) {
matchedEntry = currentSnapshots(repositoryName, Collections.emptyList()).stream()
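// Pulled out as a stand-alone sketch, the new lookup pattern above: resolving a
// snapshot name to its SnapshotId now goes through RepositoryData rather than
// Repository#getSnapshots() (the repository handle and snapshot name are illustrative):
Optional<SnapshotId> matched = repository.getRepositoryData().getSnapshotIds()
    .stream()
    .filter(s -> s.getName().equals("nightly_backup"))
    .findFirst();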
@ -1121,21 +1132,22 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
 * @param indices list of indices to be snapshotted
 * @return list of shards to be included in the current snapshot
 */
private ImmutableOpenMap<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shards(ClusterState clusterState, List<String> indices) {
private ImmutableOpenMap<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shards(ClusterState clusterState, List<IndexId> indices) {
ImmutableOpenMap.Builder<ShardId, SnapshotsInProgress.ShardSnapshotStatus> builder = ImmutableOpenMap.builder();
MetaData metaData = clusterState.metaData();
for (String index : indices) {
IndexMetaData indexMetaData = metaData.index(index);
for (IndexId index : indices) {
final String indexName = index.getName();
IndexMetaData indexMetaData = metaData.index(indexName);
if (indexMetaData == null) {
// The index was deleted before we managed to start the snapshot - mark it as missing.
builder.put(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index"));
builder.put(new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index"));
} else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
ShardId shardId = new ShardId(indexMetaData.getIndex(), i);
builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "index is closed"));
}
} else {
IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(index);
IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(indexName);
for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
ShardId shardId = new ShardId(indexMetaData.getIndex(), i);
if (indexRoutingTable != null) {

@ -1191,8 +1203,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
for (final SnapshotsInProgress.Entry entry : snapshots.entries()) {
if (entry.partial() == false) {
if (entry.state() == State.INIT) {
for (String index : entry.indices()) {
IndexMetaData indexMetaData = currentState.metaData().index(index);
for (IndexId index : entry.indices()) {
IndexMetaData indexMetaData = currentState.metaData().index(index.getName());
if (indexMetaData != null && indices.contains(indexMetaData)) {
if (indicesToFail == null) {
indicesToFail = new HashSet<>();
@ -0,0 +1,24 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Support for viewing and modifying in flight actions ({@link org.elasticsearch.tasks.Task}s) and saving their results to an index. This
 * includes getting detailed descriptions and canceling tasks that support it.
 */
package org.elasticsearch.tasks;
@ -19,7 +19,7 @@

package org.elasticsearch.action;

import org.elasticsearch.action.DocWriteResponse.Operation;
import org.elasticsearch.action.DocWriteResponse.Result;
import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.not;

public class DocWriteResponseTests extends ESTestCase {
public void testGetLocation() {
DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) {
DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Result.CREATED) {
// DocWriteResponse is abstract so we have to sneak a subclass in here to test it.
};
assertEquals("/index/type/id", response.getLocation(null));

@ -48,7 +48,7 @@ public class DocWriteResponseTests extends ESTestCase {
 * is true. We can't assert this in the yaml tests because "not found" is also "false" there....
 */
public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOException {
DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) {
DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Result.CREATED) {
// DocWriteResponse is abstract so we have to sneak a subclass in here to test it.
};
response.setShardInfo(new ShardInfo(1, 1));

@ -234,7 +234,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").doc("field1", "value1");
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());

clearInterceptedActions();
assertSameIndices(updateRequest, updateShardActions);

@ -248,7 +248,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
String indexOrAlias = randomIndexOrAlias();
UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").upsert("field", "value").doc("field1", "value1");
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult());

clearInterceptedActions();
assertSameIndices(updateRequest, updateShardActions);

@ -264,7 +264,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id")
.script(new Script("ctx.op='delete'", ScriptService.ScriptType.INLINE, CustomScriptPlugin.NAME, Collections.emptyMap()));
UpdateResponse updateResponse = internalCluster().coordOnlyNodeClient().update(updateRequest).actionGet();
assertEquals(DocWriteResponse.Operation.DELETE, updateResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult());

clearInterceptedActions();
assertSameIndices(updateRequest, updateShardActions);
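// The Operation -> Result rename drives all of the test changes above; the new
// assertion shape, sketched with illustrative index/type/id values:
IndexResponse response = client().prepareIndex("test", "type", "1").setSource("field", "value").get();
if (response.getResult() == DocWriteResponse.Result.CREATED) {
    // first write of this id; overwriting it would instead report Result.UPDATED
}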
@ -207,11 +207,11 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
.add(client().prepareIndex("test", "type", "2").setCreate(true).setSource("field", "1"))
.add(client().prepareIndex("test", "type", "1").setSource("field", "2")).get();

assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[0].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult());
assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L));
assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[1].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[1].getResponse().getResult());
assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(1L));
assertEquals(DocWriteResponse.Operation.INDEX, bulkResponse.getItems()[2].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.UPDATED, bulkResponse.getItems()[2].getResponse().getResult());
assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L));

bulkResponse = client().prepareBulk()

@ -232,11 +232,11 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
.setSource("field", "2").setVersion(12).setVersionType(VersionType.EXTERNAL))
.get();

assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[0].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult());
assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(10L));
assertEquals(DocWriteResponse.Operation.CREATE, bulkResponse.getItems()[1].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[1].getResponse().getResult());
assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(10L));
assertEquals(DocWriteResponse.Operation.INDEX, bulkResponse.getItems()[2].getResponse().getOperation());
assertEquals(DocWriteResponse.Result.UPDATED, bulkResponse.getItems()[2].getResponse().getResult());
assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(12L));

bulkResponse = client().prepareBulk()
@ -98,7 +98,7 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase {
for (int i = 0; i < 10; i++) {
// index data with mapping changes
IndexResponse response = client(dataNode).prepareIndex("myindex", "mytype").setSource("field_" + i, "val").get();
assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
}
}
});
@ -119,8 +119,8 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
for (int i = 0; i < numDocs; i++) {
String routingKey = routing ? randomRealisticUnicodeOfLength(10) : null;
String id = Integer.toString(i);
assertEquals(id, DocWriteResponse.Operation.CREATE, client().prepareIndex("test", "type1", id)
.setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().getOperation());
assertEquals(id, DocWriteResponse.Result.CREATED, client().prepareIndex("test", "type1", id)
.setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().getResult());
GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get();
assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
assertThat(get.getVersion(), equalTo(1L));

@ -478,7 +478,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs));

DeleteResponse deleteResponse = client().prepareDelete("test", "test", firstDocId).setRouting("routing").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
GetResponse getResponse = client().prepareGet("test", "test", firstDocId).setRouting("routing").get();
assertThat(getResponse.isExists(), equalTo(false));
refresh();

@ -493,7 +493,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
int numDocs = iterations(10, 50);
for (int i = 0; i < numDocs; i++) {
IndexResponse indexResponse = client().prepareIndex(indexOrAlias(), "type", Integer.toString(i)).setSource("field", "value-" + i).get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
assertThat(indexResponse.getIndex(), equalTo("test"));
assertThat(indexResponse.getType(), equalTo("type"));
assertThat(indexResponse.getId(), equalTo(Integer.toString(i)));

@ -508,7 +508,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(getResponse.getId(), equalTo(docId));

DeleteResponse deleteResponse = client().prepareDelete(indexOrAlias(), "type", docId).get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
assertThat(deleteResponse.getIndex(), equalTo("test"));
assertThat(deleteResponse.getType(), equalTo("type"));
assertThat(deleteResponse.getId(), equalTo(docId));

@ -532,7 +532,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getType(), equalTo("type1"));
assertThat(updateResponse.getId(), equalTo("1"));
assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult());

GetResponse getResponse = client().prepareGet("test", "type1", "1").get();
assertThat(getResponse.isExists(), equalTo(true));

@ -543,7 +543,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getType(), equalTo("type1"));
assertThat(updateResponse.getId(), equalTo("1"));
assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());

getResponse = client().prepareGet("test", "type1", "1").get();
assertThat(getResponse.isExists(), equalTo(true));
@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 * as blob names and repository blob formats have changed between the snapshot versions.
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
// this test sometimes fails in recovery when the recovery is reset, increasing the logging level to help debug
@TestLogging("indices.recovery:DEBUG")
public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase {

@ -70,7 +70,7 @@ public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase {
final Set<SnapshotInfo> snapshotInfos = Sets.newHashSet(getSnapshots(repoName));
assertThat(snapshotInfos.size(), equalTo(1));
SnapshotInfo originalSnapshot = snapshotInfos.iterator().next();
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", SnapshotId.UNASSIGNED_UUID)));
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1")));
assertThat(Sets.newHashSet(originalSnapshot.indices()), equalTo(indices));

logger.info("--> restore the original snapshot");
@ -53,7 +53,6 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collections;

@ -659,7 +658,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
randomBoolean(),
randomBoolean(),
SnapshotsInProgress.State.fromValue((byte) randomIntBetween(0, 6)),
Collections.<String>emptyList(),
Collections.emptyList(),
Math.abs(randomLong()),
ImmutableOpenMap.of()));
case 1:
@ -491,7 +491,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard);
IndexResponse response =
client.prepareIndex("test", "type", id).setSource("{}").setTimeout(timeout).get(timeout);
assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
ackedDocs.put(id, node);
logger.trace("[{}] indexed id [{}] through node [{}]", name, id, node);
} catch (ElasticsearchException e) {
@ -177,7 +177,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2"));

DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());

response = client().prepareGet(indexOrAlias(), "type1", "1").get();
assertThat(response.isExists(), equalTo(false));
@ -415,7 +415,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
try {
final IndexResponse indexResponse = client().prepareIndex(IDX, "doc",
Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
} catch (Exception e) {
exceptions.add(e);
}

@ -508,7 +508,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
while (counter.get() < (numPhase1Docs + numPhase2Docs + numPhase3Docs)) {
final IndexResponse indexResponse = client().prepareIndex(IDX, "doc",
Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
final int docCount = counter.get();
if (docCount == numPhase1Docs) {
phase1finished.countDown();
@ -84,7 +84,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {

// Now delete with blockUntilRefresh
DeleteResponse delete = client().prepareDelete("test", "test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get();
assertEquals(DocWriteResponse.Operation.DELETE, delete.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, delete.getResult());
assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh());
assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get());
}
@ -100,8 +100,8 @@ public class DynamicMappingIT extends ESIntegTestCase {
public void run() {
try {
startLatch.await();
assertEquals(DocWriteResponse.Operation.CREATE, client().prepareIndex("index", "type", id)
.setSource("field" + id, "bar").get().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, client().prepareIndex("index", "type", id)
.setSource("field" + id, "bar").get().getResult());
} catch (Exception e) {
error.compareAndSet(null, e);
}
@ -137,13 +137,13 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
.endObject().endObject()).get();
ensureGreen();

assertEquals(DocWriteResponse.Operation.CREATE, prepareIndex("single", "I have four terms").get().getOperation());
assertEquals(DocWriteResponse.Result.CREATED, prepareIndex("single", "I have four terms").get().getResult());
BulkResponse bulk = client().prepareBulk()
.add(prepareIndex("bulk1", "bulk three terms"))
.add(prepareIndex("bulk2", "this has five bulk terms")).get();
assertFalse(bulk.buildFailureMessage(), bulk.hasFailures());
assertEquals(DocWriteResponse.Operation.CREATE,
prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getOperation());
assertEquals(DocWriteResponse.Result.CREATED,
prepareIndex("multi", "two terms", "wow now I have seven lucky terms").get().getResult());
bulk = client().prepareBulk()
.add(prepareIndex("multibulk1", "one", "oh wow now I have eight unlucky terms"))
.add(prepareIndex("multibulk2", "six is a bunch of terms", "ten! ten terms is just crazy! too many too count!")).get();
@ -449,7 +449,7 @@ public class LegacyDateMappingTests extends ESSingleNodeTestCase {
ParsedDocument doc = defaultMapper.parse("test", "type", "1", document.bytes());
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L));
IndexResponse indexResponse = client().prepareIndex("test2", "test").setSource(document).get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

// integers should always be parsed as well... cannot be sure it is a unix timestamp only
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

@ -459,7 +459,7 @@ public class LegacyDateMappingTests extends ESSingleNodeTestCase {
.bytes());
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L));
indexResponse = client().prepareIndex("test", "test").setSource(document).get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}

public void testThatNewIndicesOnlyAllowStrictDates() throws Exception {
@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.instanceOf;
public class ScriptQueryBuilderTests extends AbstractQueryTestCase<ScriptQueryBuilder> {
@Override
protected ScriptQueryBuilder doCreateTestQueryBuilder() {
String script = "5";
String script = "1";
Map<String, Object> params = Collections.emptyMap();
return new ScriptQueryBuilder(new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params));
}
@ -155,7 +155,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
functionBuilder = fieldValueFactorFunctionBuilder;
break;
case 2:
String script = "5";
String script = "1";
Map<String, Object> params = Collections.emptyMap();
functionBuilder = new ScriptScoreFunctionBuilder(
new Script(script, ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, params));
@ -30,8 +30,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;

@ -82,8 +80,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportResponse;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.nio.file.Files;

@ -257,7 +253,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase {
final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", Integer.toString(docId.incrementAndGet()))
.source("{}");
final IndexResponse response = index(indexRequest);
assertEquals(DocWriteResponse.Operation.CREATE, response.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, response.getResult());
}
return numOfDoc;
}
@ -100,7 +100,9 @@ import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotInfo;

@ -121,8 +123,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;

@ -1184,9 +1188,9 @@ public class IndexShardTests extends ESSingleNodeTestCase {
test_target_shard.updateRoutingEntry(routing);
DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
test_target_shard.markAsRecovering("store", new RecoveryState(routing.shardId(), routing.primary(), RecoveryState.Type.SNAPSHOT, routing.restoreSource(), localNode));
assertTrue(test_target_shard.restoreFromRepository(new RestoreOnlyRepository() {
assertTrue(test_target_shard.restoreFromRepository(new RestoreOnlyRepository("test") {
@Override
public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) {
public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) {
try {
cleanLuceneIndex(targetStore.directory());
for (String file : sourceStore.directory().listAll()) {

@ -1645,8 +1649,10 @@ public class IndexShardTests extends ESSingleNodeTestCase {

/** A dummy repository for testing which just needs restore overridden */
private abstract static class RestoreOnlyRepository extends AbstractLifecycleComponent implements Repository {
public RestoreOnlyRepository() {
private final String indexName;
public RestoreOnlyRepository(String indexName) {
super(Settings.EMPTY);
this.indexName = indexName;
}
@Override
protected void doStart() {}

@ -1663,17 +1669,19 @@ public class IndexShardTests extends ESSingleNodeTestCase {
return null;
}
@Override
public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<String> indices) throws IOException {
public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<IndexId> indices) throws IOException {
return null;
}
@Override
public List<SnapshotId> getSnapshots() {
return null;
public RepositoryData getRepositoryData() {
Map<IndexId, Set<SnapshotId>> map = new HashMap<>();
map.put(new IndexId(indexName, "blah"), Collections.emptySet());
return new RepositoryData(Collections.emptyList(), map);
}
@Override
public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData metaData) {}
public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData metaData) {}
@Override
public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<String> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures) {
public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<IndexId> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures) {
return null;
}
@Override

@ -1697,9 +1705,9 @@ public class IndexShardTests extends ESSingleNodeTestCase {
return false;
}
@Override
public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {}
public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {}
@Override
public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) {
public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) {
return null;
}
@Override
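// How the dummy repository above fabricates its RepositoryData, as a stand-alone
// sketch (the "blah" index UUID is a placeholder taken from the test itself):
Map<IndexId, Set<SnapshotId>> repoIndices = new HashMap<>();
repoIndices.put(new IndexId("test", "blah"), Collections.emptySet());
RepositoryData repositoryData = new RepositoryData(Collections.emptyList(), repoIndices);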
@ -94,15 +94,15 @@ public class IndexActionIT extends ESIntegTestCase {
ensureGreen();

IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet();
assertEquals(DocWriteResponse.Operation.INDEX, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult());

client().prepareDelete("test", "type", "1").execute().actionGet();

indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

}

@ -111,14 +111,14 @@ public class IndexActionIT extends ESIntegTestCase {
ensureGreen();

IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

client().prepareDelete("test", "type", "1").execute().actionGet();

flush();

indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").execute().actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}

public void testCreatedFlagParallelExecution() throws Exception {

@ -139,7 +139,7 @@ public class IndexActionIT extends ESIntegTestCase {
public Void call() throws Exception {
int docId = random.nextInt(docCount);
IndexResponse indexResponse = index("test", "type", Integer.toString(docId), "field1", "value");
if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) {
if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
createdCounts.incrementAndGet(docId);
}
return null;

@ -161,7 +161,7 @@ public class IndexActionIT extends ESIntegTestCase {

IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(123)
.setVersionType(VersionType.EXTERNAL).execute().actionGet();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}

public void testCreateFlagWithBulk() {

@ -172,7 +172,7 @@ public class IndexActionIT extends ESIntegTestCase {
assertThat(bulkResponse.hasFailures(), equalTo(false));
assertThat(bulkResponse.getItems().length, equalTo(1));
IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}

public void testCreateIndexWithLongName() {
@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;

@ -76,15 +75,15 @@ public class DateMathIndexExpressionsIntegrationIT extends ESIntegTestCase {
assertThat(indicesStatsResponse.getIndex(index3), notNullValue());

DeleteResponse deleteResponse = client().prepareDelete(dateMathExp1, "type", "1").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
assertThat(deleteResponse.getId(), equalTo("1"));

deleteResponse = client().prepareDelete(dateMathExp2, "type", "2").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
assertThat(deleteResponse.getId(), equalTo("2"));

deleteResponse = client().prepareDelete(dateMathExp3, "type", "3").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
assertThat(deleteResponse.getId(), equalTo("3"));
}
@ -56,9 +56,9 @@ public class IndexPrimaryRelocationIT extends ESIntegTestCase {
public void run() {
while (finished.get() == false) {
IndexResponse indexResponse = client().prepareIndex("test", "type", "id").setSource("field", "value").get();
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
DeleteResponse deleteResponse = client().prepareDelete("test", "type", "id").get();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
}
}
};
@ -35,7 +35,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;

@ -1037,8 +1036,8 @@ public class IndexStatsIT extends ESIntegTestCase {
assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L));
});

assertEquals(DocWriteResponse.Operation.DELETE, client().prepareDelete("index", "type", "1").get().getOperation());
assertEquals(DocWriteResponse.Operation.DELETE, client().prepareDelete("index", "type", "2").get().getOperation());
assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "1").get().getResult());
assertEquals(DocWriteResponse.Result.DELETED, client().prepareDelete("index", "type", "2").get().getResult());
refresh();
response = client().admin().indices().prepareStats("index").setQueryCache(true).get();
assertCumulativeQueryCacheStats(response);
@ -162,7 +162,7 @@ public class IngestClientIT extends ESIntegTestCase {
itemResponse.isFailed(), is(false));
assertThat(indexResponse, notNullValue());
assertThat(indexResponse.getId(), equalTo(Integer.toString(i)));
assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
}
}
}
@ -0,0 +1,89 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories;

import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

/**
 * Tests for the {@link IndexId} class.
 */
public class IndexIdTests extends ESTestCase {

public void testEqualsAndHashCode() {
// assert equals and hashcode
String name = randomAsciiOfLength(8);
String id = UUIDs.randomBase64UUID();
IndexId indexId1 = new IndexId(name, id);
IndexId indexId2 = new IndexId(name, id);
assertEquals(indexId1, indexId2);
assertEquals(indexId1.hashCode(), indexId2.hashCode());
// assert equals when using index name for id
id = name;
indexId1 = new IndexId(name, id);
indexId2 = new IndexId(name, id);
assertEquals(indexId1, indexId2);
assertEquals(indexId1.hashCode(), indexId2.hashCode());
// assert not equals when name or id differ
indexId2 = new IndexId(randomAsciiOfLength(8), id);
assertNotEquals(indexId1, indexId2);
assertNotEquals(indexId1.hashCode(), indexId2.hashCode());
indexId2 = new IndexId(name, UUIDs.randomBase64UUID());
assertNotEquals(indexId1, indexId2);
assertNotEquals(indexId1.hashCode(), indexId2.hashCode());
}

public void testSerialization() throws IOException {
IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID());
BytesStreamOutput out = new BytesStreamOutput();
indexId.writeTo(out);
assertEquals(indexId, new IndexId(out.bytes().streamInput()));
}

public void testXContent() throws IOException {
IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID());
XContentBuilder builder = JsonXContent.contentBuilder();
indexId.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentType.JSON.xContent().createParser(builder.bytes());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
String name = null;
String id = null;
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
final String currentFieldName = parser.currentName();
parser.nextToken();
if (currentFieldName.equals(IndexId.NAME)) {
name = parser.text();
} else if (currentFieldName.equals(IndexId.ID)) {
id = parser.text();
}
}
assertNotNull(name);
assertNotNull(id);
assertEquals(indexId, new IndexId(name, id));
}
}
@@ -0,0 +1,171 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.repositories;

import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.Matchers.greaterThan;

/**
 * Tests for the {@link RepositoryData} class.
 */
public class RepositoryDataTests extends ESTestCase {

    public void testEqualsAndHashCode() {
        RepositoryData repositoryData1 = generateRandomRepoData();
        RepositoryData repositoryData2 = repositoryData1.copy();
        assertEquals(repositoryData1, repositoryData2);
        assertEquals(repositoryData1.hashCode(), repositoryData2.hashCode());
    }

    public void testXContent() throws IOException {
        RepositoryData repositoryData = generateRandomRepoData();
        XContentBuilder builder = JsonXContent.contentBuilder();
        repositoryData.toXContent(builder, ToXContent.EMPTY_PARAMS);
        XContentParser parser = XContentType.JSON.xContent().createParser(builder.bytes());
        assertEquals(repositoryData, RepositoryData.fromXContent(parser));
    }

    public void testAddSnapshots() {
        RepositoryData repositoryData = generateRandomRepoData();
        // test that adding the same snapshot id to the repository data throws an exception
        final SnapshotId snapshotId = repositoryData.getSnapshotIds().get(0);
        Map<String, IndexId> indexIdMap = repositoryData.getIndices();
        expectThrows(IllegalArgumentException.class,
            () -> repositoryData.addSnapshot(new SnapshotId(snapshotId.getName(), snapshotId.getUUID()), Collections.emptyList()));
        // test that adding a snapshot and its indices works
        SnapshotId newSnapshot = new SnapshotId(randomAsciiOfLength(7), UUIDs.randomBase64UUID());
        List<IndexId> indices = new ArrayList<>();
        Set<IndexId> newIndices = new HashSet<>();
        int numNew = randomIntBetween(1, 10);
        for (int i = 0; i < numNew; i++) {
            IndexId indexId = new IndexId(randomAsciiOfLength(7), UUIDs.randomBase64UUID());
            newIndices.add(indexId);
            indices.add(indexId);
        }
        int numOld = randomIntBetween(1, indexIdMap.size());
        List<String> indexNames = new ArrayList<>(indexIdMap.keySet());
        for (int i = 0; i < numOld; i++) {
            indices.add(indexIdMap.get(indexNames.get(i)));
        }
        RepositoryData newRepoData = repositoryData.addSnapshot(newSnapshot, indices);
        // verify that the new repository data has the new snapshot and its indices
        assertTrue(newRepoData.getSnapshotIds().contains(newSnapshot));
        for (IndexId indexId : indices) {
            Set<SnapshotId> snapshotIds = newRepoData.getSnapshots(indexId);
            assertTrue(snapshotIds.contains(newSnapshot));
            if (newIndices.contains(indexId)) {
                assertEquals(snapshotIds.size(), 1); // if it was a new index, only the new snapshot should be in its set
            }
        }
    }

    public void testInitIndices() {
        final int numSnapshots = randomIntBetween(1, 30);
        final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots);
        for (int i = 0; i < numSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        RepositoryData repositoryData = new RepositoryData(snapshotIds, Collections.emptyMap());
        // test that initializing indices works
        Map<IndexId, Set<SnapshotId>> indices = randomIndices(snapshotIds);
        RepositoryData newRepoData = repositoryData.initIndices(indices);
        assertEquals(repositoryData.getSnapshotIds(), newRepoData.getSnapshotIds());
        for (IndexId indexId : indices.keySet()) {
            assertEquals(indices.get(indexId), newRepoData.getSnapshots(indexId));
        }
    }

    public void testRemoveSnapshot() {
        RepositoryData repositoryData = generateRandomRepoData();
        List<SnapshotId> snapshotIds = new ArrayList<>(repositoryData.getSnapshotIds());
        assertThat(snapshotIds.size(), greaterThan(0));
        SnapshotId removedSnapshotId = snapshotIds.remove(randomIntBetween(0, snapshotIds.size() - 1));
        RepositoryData newRepositoryData = repositoryData.removeSnapshot(removedSnapshotId);
        // make sure the repository data's indices no longer contain the removed snapshot
        for (final IndexId indexId : newRepositoryData.getIndices().values()) {
            assertFalse(newRepositoryData.getSnapshots(indexId).contains(removedSnapshotId));
        }
    }

    public void testResolveIndexId() {
        RepositoryData repositoryData = generateRandomRepoData();
        Map<String, IndexId> indices = repositoryData.getIndices();
        Set<String> indexNames = indices.keySet();
        assertThat(indexNames.size(), greaterThan(0));
        String indexName = indexNames.iterator().next();
        IndexId indexId = indices.get(indexName);
        assertEquals(indexId, repositoryData.resolveIndexId(indexName));
        String notInRepoData = randomAsciiOfLength(5);
        assertFalse(indexName.contains(notInRepoData));
        assertEquals(new IndexId(notInRepoData, notInRepoData), repositoryData.resolveIndexId(notInRepoData));
    }

    public static RepositoryData generateRandomRepoData() {
        return generateRandomRepoData(new ArrayList<>());
    }

    public static RepositoryData generateRandomRepoData(final List<SnapshotId> origSnapshotIds) {
        List<SnapshotId> snapshotIds = randomSnapshots(origSnapshotIds);
        return new RepositoryData(snapshotIds, randomIndices(snapshotIds));
    }

    private static List<SnapshotId> randomSnapshots(final List<SnapshotId> origSnapshotIds) {
        final int numSnapshots = randomIntBetween(1, 30);
        final List<SnapshotId> snapshotIds = new ArrayList<>(origSnapshotIds);
        for (int i = 0; i < numSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        return snapshotIds;
    }

    private static Map<IndexId, Set<SnapshotId>> randomIndices(final List<SnapshotId> snapshotIds) {
        final int totalSnapshots = snapshotIds.size();
        final int numIndices = randomIntBetween(1, 30);
        final Map<IndexId, Set<SnapshotId>> indices = new HashMap<>(numIndices);
        for (int i = 0; i < numIndices; i++) {
            final IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID());
            final Set<SnapshotId> indexSnapshots = new LinkedHashSet<>();
            final int numIndicesForSnapshot = randomIntBetween(1, numIndices);
            for (int j = 0; j < numIndicesForSnapshot; j++) {
                indexSnapshots.add(snapshotIds.get(randomIntBetween(0, totalSnapshots - 1)));
            }
            indices.put(indexId, indexSnapshots);
        }
        return indices;
    }
}

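A minimal sketch of the lifecycle these tests exercise; this is not part of the commit, it only uses RepositoryData calls that appear in the diff, and "snap-1"/"my-index" are hypothetical names:

    // addSnapshot and removeSnapshot both return a new RepositoryData
    // instance and leave the receiver unchanged.
    RepositoryData repoData = RepositoryData.EMPTY;
    SnapshotId snapshot = new SnapshotId("snap-1", UUIDs.randomBase64UUID());
    IndexId index = new IndexId("my-index", UUIDs.randomBase64UUID());
    repoData = repoData.addSnapshot(snapshot, Collections.singletonList(index));
    assert repoData.getSnapshots(index).contains(snapshot);
    repoData = repoData.removeSnapshot(snapshot);
    assert repoData.getSnapshotIds().isEmpty();
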
@@ -28,11 +28,11 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESSingleNodeTestCase;

@@ -44,7 +44,7 @@ import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.blobId;
import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData;
import static org.hamcrest.Matchers.equalTo;

/**

@@ -109,86 +109,56 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
    public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception {
        final BlobStoreRepository repository = setupRepo();

        // write to and read from a snapshot file with no entries
        assertThat(repository.getSnapshots().size(), equalTo(0));
        repository.writeSnapshotsToIndexGen(Collections.emptyList());
        // write to and read from an index file with no entries
        assertThat(repository.getSnapshots().size(), equalTo(0));
        final RepositoryData emptyData = RepositoryData.EMPTY;
        repository.writeIndexGen(emptyData);
        final RepositoryData readData = repository.getRepositoryData();
        assertEquals(readData, emptyData);
        assertEquals(readData.getIndices().size(), 0);
        assertEquals(readData.getSnapshotIds().size(), 0);

        // write to and read from a snapshot file with a random number of entries
        final int numSnapshots = randomIntBetween(1, 1000);
        // write to and read from an index file with snapshots but no indices
        final int numSnapshots = randomIntBetween(1, 20);
        final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots);
        for (int i = 0; i < numSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        repository.writeSnapshotsToIndexGen(snapshotIds);
        assertThat(repository.getSnapshots(), equalTo(snapshotIds));
        RepositoryData repositoryData = new RepositoryData(snapshotIds, Collections.emptyMap());
        repository.writeIndexGen(repositoryData);
        assertEquals(repository.getRepositoryData(), repositoryData);

        // write to and read from an index file with random repository data
        repositoryData = generateRandomRepoData();
        repository.writeIndexGen(repositoryData);
        assertThat(repository.getRepositoryData(), equalTo(repositoryData));
    }

    public void testIndexGenerationalFiles() throws Exception {
        final BlobStoreRepository repository = setupRepo();

        // write to index generational file
        final int numSnapshots = randomIntBetween(1, 1000);
        final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots);
        for (int i = 0; i < numSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        repository.writeSnapshotsToIndexGen(snapshotIds);
        assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds)));
        RepositoryData repositoryData = generateRandomRepoData();
        repository.writeIndexGen(repositoryData);
        assertThat(repository.getRepositoryData(), equalTo(repositoryData));
        assertThat(repository.latestIndexBlobId(), equalTo(0L));
        assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(0L));

        // adding more and writing to a new index generational file
        for (int i = 0; i < 10; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        repository.writeSnapshotsToIndexGen(snapshotIds);
        assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds)));
        repositoryData = generateRandomRepoData();
        repository.writeIndexGen(repositoryData);
        assertEquals(repository.getRepositoryData(), repositoryData);
        assertThat(repository.latestIndexBlobId(), equalTo(1L));
        assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(1L));

        // removing a snapshot and writing to a new index generational file
        snapshotIds.remove(0);
        repository.writeSnapshotsToIndexGen(snapshotIds);
        assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds)));
        // removing a snapshot and writing to a new index generational file
        repositoryData = repositoryData.removeSnapshot(repositoryData.getSnapshotIds().get(0));
        repository.writeIndexGen(repositoryData);
        assertEquals(repository.getRepositoryData(), repositoryData);
        assertThat(repository.latestIndexBlobId(), equalTo(2L));
        assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(2L));
    }

    public void testOldIndexFileFormat() throws Exception {
        final BlobStoreRepository repository = setupRepo();

        // write old index file format
        final int numOldSnapshots = randomIntBetween(1, 50);
        final List<SnapshotId> snapshotIds = new ArrayList<>();
        for (int i = 0; i < numOldSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID));
        }
        writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList()));
        assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds)));

        // write to and read from a snapshot file with a random number of new entries added
        final int numSnapshots = randomIntBetween(1, 1000);
        for (int i = 0; i < numSnapshots; i++) {
            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
        }
        repository.writeSnapshotsToIndexGen(snapshotIds);
        assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds)));
    }

    public void testBlobId() {
        SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID);
        assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name
        snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID);
        assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name
        String uuid = UUIDs.randomBase64UUID();
        snapshotId = new SnapshotId("abc123", uuid);
        assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid
        uuid = UUIDs.randomBase64UUID();
        snapshotId = new SnapshotId("abc-123", uuid);
        assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid
    }

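testBlobId above pins down a simple naming rule. The following is a hypothetical restatement of that rule, not code from the commit, using only SnapshotId accessors that appear elsewhere in this diff:

    // Legacy snapshots (no UUID assigned) resolve to the bare snapshot name;
    // once a UUID is assigned, the blob id becomes name + "-" + uuid.
    static String blobIdRule(SnapshotId snapshotId) {
        if (SnapshotId.UNASSIGNED_UUID.equals(snapshotId.getUUID())) {
            return snapshotId.getName();
        }
        return snapshotId.getName() + "-" + snapshotId.getUUID();
    }
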

    private BlobStoreRepository setupRepo() {
        final Client client = client();
        final Path location = ESIntegTestCase.randomRepoPath(node().settings());

@@ -37,14 +37,16 @@ public class FileScriptTests extends ESTestCase {
        Path scriptsDir = homeDir.resolve("config").resolve("scripts");
        Files.createDirectories(scriptsDir);
        Path mockscript = scriptsDir.resolve("script1.mockscript");
        Files.write(mockscript, "1".getBytes("UTF-8"));
        String scriptSource = "1";
        Files.write(mockscript, scriptSource.getBytes("UTF-8"));
        settings = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
            // no file watching, so we don't need a ResourceWatcherService
            .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
            .put(settings)
            .build();
        ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new MockScriptEngine()));
        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap(scriptSource, script -> "1"));
        ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(scriptEngine));
        ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
        ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
        return new ScriptService(settings, new Environment(settings), null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);

File diff suppressed because it is too large

@@ -43,7 +43,9 @@ public class ScriptContextTests extends ESTestCase {
            .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false")
            .put("script.engine." + MockScriptEngine.NAME + ".inline." + PLUGIN_NAME + "_custom_exp_disabled_op", "false")
            .build();
        ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new MockScriptEngine()));

        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1"));
        ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(scriptEngine));
        List<ScriptContext.Plugin> customContexts = Arrays.asList(
            new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"),
            new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"),

@@ -86,7 +86,7 @@ public class StoredScriptsIT extends ESIntegTestCase {

        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.emptyMap();
            return Collections.singletonMap("1", script -> "1");
        }
    }
}

@@ -16,13 +16,14 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;

@@ -36,12 +37,15 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.script.ScriptService.ScriptType;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;

@@ -49,11 +53,8 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;

/**
 *
 */
@ESIntegTestCase.SuiteScopeTestCase
public class RangeTests extends ESIntegTestCase {
public class RangeIT extends ESIntegTestCase {

    private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
    private static final String MULTI_VALUED_FIELD_NAME = "l_values";

@@ -62,7 +63,30 @@ public class RangeTests extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
        return Collections.singleton(CustomScriptPlugin.class);
    }

    public static class CustomScriptPlugin extends AggregationTestScriptsPlugin {

        @Override
        @SuppressWarnings("unchecked")
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            Map<String, Function<Map<String, Object>, Object>> scripts = super.pluginScripts();

            scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> {
                Map<?, ?> doc = (Map) vars.get("doc");
                ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(SINGLE_VALUED_FIELD_NAME);
                return value.getValue();
            });

            scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", vars -> {
                Map<?, ?> doc = (Map) vars.get("doc");
                ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get(MULTI_VALUED_FIELD_NAME);
                return value.getValues();
            });

            return scripts;
        }
    }

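The map above only takes effect when a test builds a Script whose source string matches a pluginScripts() key exactly. A minimal sketch of the call site, mirroring usage that appears later in this diff rather than introducing new API:

    // The source string must be byte-for-byte identical to the registered key,
    // otherwise the MockScriptEngine has no function to run for it.
    Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value",
            ScriptType.INLINE, CustomScriptPlugin.NAME, null);
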
    @Override

@@ -94,10 +118,10 @@ public class RangeTests extends ESIntegTestCase {
        SearchResponse response = client().prepareSearch("idx")
            .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).size(100)
                .collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(
                range("range").field(SINGLE_VALUED_FIELD_NAME)
                        .addUnboundedTo(3)
                        .addRange(3, 6)
                        .addUnboundedFrom(6)))
                    range("range").field(SINGLE_VALUED_FIELD_NAME)
                        .addUnboundedTo(3)
                        .addRange(3, 6)
                        .addUnboundedFrom(6)))
            .execute().actionGet();

        assertSearchResponse(response);

@@ -112,7 +136,7 @@ public class RangeTests extends ESIntegTestCase {
        Range range = bucket.getAggregations().get("range");
        List<? extends Bucket> buckets = range.getBuckets();
        Range.Bucket rangeBucket = buckets.get(0);
        assertThat((String) rangeBucket.getKey(), equalTo("*-3.0"));
        assertThat(rangeBucket.getKey(), equalTo("*-3.0"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("*-3.0"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), nullValue());

@@ -125,7 +149,7 @@ public class RangeTests extends ESIntegTestCase {
            assertThat(rangeBucket.getDocCount(), equalTo(0L));
        }
        rangeBucket = buckets.get(1);
        assertThat((String) rangeBucket.getKey(), equalTo("3.0-6.0"));
        assertThat(rangeBucket.getKey(), equalTo("3.0-6.0"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("3.0-6.0"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), equalTo("3.0"));

@@ -138,7 +162,7 @@ public class RangeTests extends ESIntegTestCase {
            assertThat(rangeBucket.getDocCount(), equalTo(0L));
        }
        rangeBucket = buckets.get(2);
        assertThat((String) rangeBucket.getKey(), equalTo("6.0-*"));
        assertThat(rangeBucket.getKey(), equalTo("6.0-*"));
        assertThat(rangeBucket.getKeyAsString(), equalTo("6.0-*"));
        assertThat(rangeBucket, notNullValue());
        assertThat(rangeBucket.getFromAsString(), equalTo("6.0"));

@@ -173,7 +197,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -182,7 +206,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -191,7 +215,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -217,7 +241,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3"));
        assertThat(bucket.getKey(), equalTo("*-3"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -226,7 +250,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3-6"));
        assertThat(bucket.getKey(), equalTo("3-6"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3"));

@@ -235,7 +259,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6-*"));
        assertThat(bucket.getKey(), equalTo("6-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6"));

@@ -263,7 +287,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("r1"));
        assertThat(bucket.getKey(), equalTo("r1"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -272,7 +296,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("r2"));
        assertThat(bucket.getKey(), equalTo("r2"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -281,7 +305,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("r3"));
        assertThat(bucket.getKey(), equalTo("r3"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -313,7 +337,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -322,13 +346,13 @@ public class RangeTests extends ESIntegTestCase {
        Sum sum = bucket.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getValue(), equalTo(3.0)); // 1 + 2
        assertThat((String) propertiesKeys[0], equalTo("*-3.0"));
        assertThat((long) propertiesDocCounts[0], equalTo(2L));
        assertThat((double) propertiesCounts[0], equalTo(3.0));
        assertThat(propertiesKeys[0], equalTo("*-3.0"));
        assertThat(propertiesDocCounts[0], equalTo(2L));
        assertThat(propertiesCounts[0], equalTo(3.0));

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -337,13 +361,13 @@ public class RangeTests extends ESIntegTestCase {
        sum = bucket.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getValue(), equalTo(12.0)); // 3 + 4 + 5
        assertThat((String) propertiesKeys[1], equalTo("3.0-6.0"));
        assertThat((long) propertiesDocCounts[1], equalTo(3L));
        assertThat((double) propertiesCounts[1], equalTo(12.0));
        assertThat(propertiesKeys[1], equalTo("3.0-6.0"));
        assertThat(propertiesDocCounts[1], equalTo(3L));
        assertThat(propertiesCounts[1], equalTo(12.0));

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -356,17 +380,22 @@ public class RangeTests extends ESIntegTestCase {
            total += i + 1;
        }
        assertThat(sum.getValue(), equalTo((double) total));
        assertThat((String) propertiesKeys[2], equalTo("6.0-*"));
        assertThat((long) propertiesDocCounts[2], equalTo(numDocs - 5L));
        assertThat((double) propertiesCounts[2], equalTo((double) total));
        assertThat(propertiesKeys[2], equalTo("6.0-*"));
        assertThat(propertiesDocCounts[2], equalTo(numDocs - 5L));
        assertThat(propertiesCounts[2], equalTo((double) total));
    }

    public void testSingleValuedFieldWithValueScript() throws Exception {
        SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                range("range").field(SINGLE_VALUED_FIELD_NAME).script(new Script("_value + 1")).addUnboundedTo(3).addRange(3, 6)
                    .addUnboundedFrom(6)).execute().actionGet();
                range("range")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .get();

        assertSearchResponse(response);

@@ -378,7 +407,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -387,7 +416,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -396,7 +425,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -437,7 +466,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -446,7 +475,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -455,7 +484,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -480,8 +509,13 @@ public class RangeTests extends ESIntegTestCase {
        SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                range("range").field(MULTI_VALUED_FIELD_NAME).script(new Script("_value + 1")).addUnboundedTo(3).addRange(3, 6)
                    .addUnboundedFrom(6)).execute().actionGet();
                range("range")
                    .field(MULTI_VALUED_FIELD_NAME)
                    .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .get();

        assertSearchResponse(response);

@@ -494,7 +528,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -503,7 +537,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -512,7 +546,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -538,11 +572,16 @@ public class RangeTests extends ESIntegTestCase {
     */

    public void testScriptSingleValue() throws Exception {
        Script script = new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                range("range").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).addUnboundedTo(3).addRange(3, 6)
                    .addUnboundedFrom(6)).execute().actionGet();
                range("range")
                    .script(script)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .get();

        assertSearchResponse(response);

@@ -555,7 +594,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -564,7 +603,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -573,7 +612,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -600,7 +639,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*--1.0"));
        assertThat(bucket.getKey(), equalTo("*--1.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(-1.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -609,7 +648,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
        assertThat(bucket.getKey(), equalTo("1000.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000d));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("1000.0"));

@@ -618,11 +657,17 @@ public class RangeTests extends ESIntegTestCase {
    }

    public void testScriptMultiValued() throws Exception {
        Script script = new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
            .prepareSearch("idx")
            .addAggregation(
                range("range").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "'].values")).addUnboundedTo(3).addRange(3, 6)
                    .addUnboundedFrom(6)).execute().actionGet();
                range("range")
                    .script(script)
                    .addUnboundedTo(3)
                    .addRange(3, 6)
                    .addUnboundedFrom(6))
            .get();

        assertSearchResponse(response);

@@ -635,7 +680,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -644,7 +689,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -653,7 +698,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -698,7 +743,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -707,7 +752,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -716,7 +761,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -746,7 +791,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-3.0"));
        assertThat(bucket.getKey(), equalTo("*-3.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -755,7 +800,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -764,7 +809,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("6.0-*"));
        assertThat(bucket.getKey(), equalTo("6.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("6.0"));

@@ -793,7 +838,7 @@ public class RangeTests extends ESIntegTestCase {

        Range.Bucket bucket = buckets.get(0);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("*-5.0"));
        assertThat(bucket.getKey(), equalTo("*-5.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
        assertThat(bucket.getFromAsString(), nullValue());

@@ -802,7 +847,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(1);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(bucket.getKey(), equalTo("3.0-6.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
        assertThat(bucket.getFromAsString(), equalTo("3.0"));

@@ -811,7 +856,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(2);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("4.0-5.0"));
        assertThat(bucket.getKey(), equalTo("4.0-5.0"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
        assertThat(bucket.getFromAsString(), equalTo("4.0"));

@@ -820,7 +865,7 @@ public class RangeTests extends ESIntegTestCase {

        bucket = buckets.get(3);
        assertThat(bucket, notNullValue());
        assertThat((String) bucket.getKey(), equalTo("4.0-*"));
        assertThat(bucket.getKey(), equalTo("4.0-*"));
        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
        assertThat(bucket.getFromAsString(), equalTo("4.0"));

@@ -831,9 +876,16 @@ public class RangeTests extends ESIntegTestCase {
    public void testEmptyAggregation() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
            .setQuery(matchAllQuery())
            .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0)
                .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)))
            .execute().actionGet();
            .addAggregation(
                histogram("histo")
                    .field(SINGLE_VALUED_FIELD_NAME)
                    .interval(1L)
                    .minDocCount(0)
                    .subAggregation(
                        range("range")
                            .field(SINGLE_VALUED_FIELD_NAME)
                            .addRange("0-2", 0.0, 2.0)))
            .get();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
        Histogram histo = searchResponse.getAggregations().get("histo");

@@ -843,11 +895,11 @@ public class RangeTests extends ESIntegTestCase {

        Range range = bucket.getAggregations().get("range");
        // TODO: use diamond once JI-9019884 is fixed
        List<Range.Bucket> buckets = new ArrayList<Range.Bucket>(range.getBuckets());
        List<Range.Bucket> buckets = new ArrayList<>(range.getBuckets());
        assertThat(range, Matchers.notNullValue());
        assertThat(range.getName(), equalTo("range"));
        assertThat(buckets.size(), is(1));
        assertThat((String) buckets.get(0).getKey(), equalTo("0-2"));
        assertThat(buckets.get(0).getKey(), equalTo("0-2"));
        assertThat(((Number) buckets.get(0).getFrom()).doubleValue(), equalTo(0.0));
        assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(2.0));
        assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));

@@ -17,18 +17,18 @@
 * under the License.
 */

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.global.Global;

@@ -39,12 +39,17 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

@@ -64,13 +69,130 @@ import static org.hamcrest.Matchers.sameInstance;

@ClusterScope(scope = Scope.SUITE)
@ESIntegTestCase.SuiteScopeTestCase
public class ScriptedMetricTests extends ESIntegTestCase {
public class ScriptedMetricIT extends ESIntegTestCase {

    private static long numDocs;

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
        return Collections.singleton(CustomScriptPlugin.class);
    }

    public static class CustomScriptPlugin extends MockScriptPlugin {

        @Override
        @SuppressWarnings("unchecked")
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();

            scripts.put("_agg['count'] = 1", vars ->
                aggScript(vars, agg -> ((Map<String, Object>) agg).put("count", 1)));

            scripts.put("_agg.add(1)", vars ->
                aggScript(vars, agg -> ((List) agg).add(1)));

            scripts.put("vars.multiplier = 3", vars ->
                ((Map<String, Object>) vars.get("vars")).put("multiplier", 3));

            scripts.put("_agg.add(vars.multiplier)", vars ->
                aggScript(vars, agg -> ((List) agg).add(XContentMapValues.extractValue("vars.multiplier", vars))));

            // Equivalent to:
            //
            // newaggregation = [];
            // sum = 0;
            //
            // for (a in _agg) {
            //     sum += a
            // };
            //
            // newaggregation.add(sum);
            // return newaggregation"
            //
            scripts.put("sum agg values as a new aggregation", vars -> {
                List newAggregation = new ArrayList();
                List<?> agg = (List<?>) vars.get("_agg");

                if (agg != null) {
                    Integer sum = 0;
                    for (Object a : (List) agg) {
                        sum += ((Number) a).intValue();
                    }
                    newAggregation.add(sum);
                }
                return newAggregation;
            });

            // Equivalent to:
            //
            // newaggregation = [];
            // sum = 0;
            //
            // for (aggregation in _aggs) {
            //     for (a in aggregation) {
            //         sum += a
            //     }
            // };
            //
            // newaggregation.add(sum);
            // return newaggregation"
            //
            scripts.put("sum aggs of agg values as a new aggregation", vars -> {
                List newAggregation = new ArrayList();
                Integer sum = 0;

                List<?> aggs = (List<?>) vars.get("_aggs");
                for (Object aggregation : (List) aggs) {
                    if (aggregation != null) {
                        for (Object a : (List) aggregation) {
                            sum += ((Number) a).intValue();
                        }
                    }
                }
                newAggregation.add(sum);
                return newAggregation;
            });

            // Equivalent to:
            //
            // newaggregation = [];
            // sum = 0;
            //
            // for (aggregation in _aggs) {
            //     for (a in aggregation) {
            //         sum += a
            //     }
            // };
            //
            // newaggregation.add(sum * multiplier);
            // return newaggregation"
            //
            scripts.put("multiplied sum aggs of agg values as a new aggregation", vars -> {
                Integer multiplier = (Integer) vars.get("multiplier");
                List newAggregation = new ArrayList();
                Integer sum = 0;

                List<?> aggs = (List<?>) vars.get("_aggs");
                for (Object aggregation : (List) aggs) {
                    if (aggregation != null) {
                        for (Object a : (List) aggregation) {
                            sum += ((Number) a).intValue();
                        }
                    }
                }
                newAggregation.add(sum * multiplier);
                return newAggregation;
            });

            return scripts;
        }

        @SuppressWarnings("unchecked")
        static <T> Object aggScript(Map<String, Object> vars, Consumer<T> fn) {
            T agg = (T) vars.get("_agg");
            fn.accept(agg);
            return agg;
        }
    }

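For orientation, the keys registered above line up with the init, map, combine, and reduce phases of the scripted metric aggregation under test. A hedged sketch of the Script objects a test method would build against them (the wiring into the aggregation builder is not shown in this hunk, so treat that part as an assumption):

    // Each source string must exactly match a pluginScripts() key.
    Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
    Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
    Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
    Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
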
@Override
|
||||
|
@@ -83,7 +205,7 @@ public class ScriptedMetricTests extends ESIntegTestCase {
        for (int i = 0; i < numDocs; i++) {
            builders.add(client().prepareIndex("idx", "type", "" + i).setSource(
                    jsonBuilder().startObject().field("value", randomAsciiOfLengthBetween(5, 15))
                            .field("l_value", i).endObject()));
        }
        indexRandom(true, builders);

@@ -102,25 +224,28 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                    jsonBuilder().startObject().field("value", i * 2).endObject()));
        }

        // When using the MockScriptPlugin we can map stored scripts to inline scripts:
        // the id of the stored script is used in the test method, while the source of the stored script
        // must match a predefined script from the CustomScriptPlugin.pluginScripts() method
        assertAcked(client().admin().cluster().preparePutStoredScript()
                .setScriptLang(GroovyScriptEngineService.NAME)
                .setId("initScript_indexed")
                .setScriptLang(CustomScriptPlugin.NAME)
                .setId("initScript_stored")
                .setSource(new BytesArray("{\"script\":\"vars.multiplier = 3\"}")));

        assertAcked(client().admin().cluster().preparePutStoredScript()
                .setScriptLang(GroovyScriptEngineService.NAME)
                .setId("mapScript_indexed")
                .setScriptLang(CustomScriptPlugin.NAME)
                .setId("mapScript_stored")
                .setSource(new BytesArray("{\"script\":\"_agg.add(vars.multiplier)\"}")));

        assertAcked(client().admin().cluster().preparePutStoredScript()
                .setScriptLang(GroovyScriptEngineService.NAME)
                .setId("combineScript_indexed")
                .setSource(new BytesArray("{\"script\":\"newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation\"}")));
                .setScriptLang(CustomScriptPlugin.NAME)
                .setId("combineScript_stored")
                .setSource(new BytesArray("{\"script\":\"sum agg values as a new aggregation\"}")));

        assertAcked(client().admin().cluster().preparePutStoredScript()
                .setScriptLang(GroovyScriptEngineService.NAME)
                .setId("reduceScript_indexed")
                .setSource(new BytesArray("{\"script\":\"newaggregation = []; sum = 0;for (agg in _aggs) { for (a in agg) { sum += a} }; newaggregation.add(sum); return newaggregation\"}")));
                .setScriptLang(CustomScriptPlugin.NAME)
                .setId("reduceScript_stored")
                .setSource(new BytesArray("{\"script\":\"sum aggs of agg values as a new aggregation\"}")));

        indexRandom(true, builders);
        ensureSearchable();
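Note: stored scripts are registered by id, and with the mock plugin the stored source is itself the lookup key into pluginScripts(). A sketch of that two-step resolution (the map names are illustrative, not real API):

    import java.util.Map;
    import java.util.function.Function;

    class StoredScriptResolutionSketch {
        // storedScripts: id -> source (what preparePutStoredScript registered);
        // pluginScripts: source -> Java function (what CustomScriptPlugin declares).
        static Object run(Map<String, String> storedScripts,
                          Map<String, Function<Map<String, Object>, Object>> pluginScripts,
                          String id, Map<String, Object> vars) {
            String source = storedScripts.get(id); // e.g. "mapScript_stored" -> "_agg.add(vars.multiplier)"
            return pluginScripts.get(source).apply(vars); // dispatch on the source string
        }
    }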
@@ -128,16 +253,36 @@ public class ScriptedMetricTests extends ESIntegTestCase {

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings settings = Settings.builder()
        Path config = createTempDir().resolve("config");
        Path scripts = config.resolve("scripts");

        try {
            Files.createDirectories(scripts);

            // When using the MockScriptPlugin we can map file scripts to inline scripts:
            // the name of the file script is used in the test method, while the source of the file script
            // must match a predefined script from the CustomScriptPlugin.pluginScripts() method
            Files.write(scripts.resolve("init_script.mockscript"), "vars.multiplier = 3".getBytes("UTF-8"));
            Files.write(scripts.resolve("map_script.mockscript"), "_agg.add(vars.multiplier)".getBytes("UTF-8"));
            Files.write(scripts.resolve("combine_script.mockscript"), "sum agg values as a new aggregation".getBytes("UTF-8"));
            Files.write(scripts.resolve("reduce_script.mockscript"), "sum aggs of agg values as a new aggregation".getBytes("UTF-8"));
        } catch (IOException e) {
            throw new RuntimeException("failed to create scripts", e);
        }

        return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/org/elasticsearch/messy/tests/conf"))
                .put(Environment.PATH_CONF_SETTING.getKey(), config)
                .build();
        return settings;
    }
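Note: the file name minus its extension becomes the script name the tests refer to, and the extension is what routes the file to an engine, which is presumably why the files above end in ".mockscript". A tiny sketch of that naming convention (the extension constant and paths are assumptions for illustration):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    class FileScriptNamingSketch {
        static final String MOCK_EXTENSION = ".mockscript"; // assumed to match the mock engine's type

        // "init_script.mockscript" on disk is referenced as file script "init_script" in tests.
        static String scriptNameFor(Path file) {
            String name = file.getFileName().toString();
            if (!name.endsWith(MOCK_EXTENSION)) {
                throw new IllegalArgumentException("not a mock script file: " + name);
            }
            return name.substring(0, name.length() - MOCK_EXTENSION.length());
        }

        public static void main(String[] args) {
            System.out.println(scriptNameFor(Paths.get("config/scripts/init_script.mockscript")));
        }
    }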
||||
public void testMap() {
|
||||
SearchResponse response = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(scriptedMetric("scripted").mapScript(new Script("_agg['count'] = 1"))).execute().actionGet();
|
||||
Script mapScript = new Script("_agg['count'] = 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
|
||||
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(scriptedMetric("scripted").mapScript(mapScript))
|
||||
.get();
|
||||
assertSearchResponse(response);
|
||||
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
|
||||
|
||||
|
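Note: the pattern in this and the following hunks is the move from the one-argument Script constructor, which falls back to the node's default script language, to the explicit four-argument form already used above, pinning each script to the mock engine:

    // Script(script, type, lang, params): lang must be CustomScriptPlugin.NAME here,
    // otherwise the source would be routed to the default engine instead of the mock one.
    Script mapScript = new Script("_agg['count'] = 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null);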
@@ -159,7 +304,7 @@ public class ScriptedMetricTests extends ESIntegTestCase {
            if (map.size() == 1) {
                assertThat(map.get("count"), notNullValue());
                assertThat(map.get("count"), instanceOf(Number.class));
                assertThat((Number) map.get("count"), equalTo((Number) 1));
                assertThat(map.get("count"), equalTo((Number) 1));
                numShardsRun++;
            }
        }

@@ -172,8 +317,12 @@ public class ScriptedMetricTests extends ESIntegTestCase {
        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());

        SearchResponse response = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(scriptedMetric("scripted").params(params).mapScript(new Script("_agg.add(1)"))).execute().actionGet();
        Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, params);

        SearchResponse response = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));

@@ -205,6 +354,7 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

@@ -213,8 +363,11 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted").params(params).initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))).execute().actionGet();
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                                .mapScript(new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null)))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -246,20 +399,22 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testMapCombineWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script mapScript = new Script("_agg.add(1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .mapScript(new Script("_agg.add(1)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")))
                                .mapScript(mapScript)
                                .combineScript(combineScript))
                .execute().actionGet();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -295,22 +450,25 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapCombineWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation")))
                                .execute().actionGet();
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .combineScript(combineScript))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -345,25 +503,27 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapCombineReduceWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))
                                .execute().actionGet();
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .combineScript(combineScript)
                                .reduceScript(reduceScript))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -386,9 +546,16 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapCombineReduceGetProperty() throws Exception {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse searchResponse = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())

@@ -397,15 +564,11 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                .subAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))))
                .execute().actionGet();
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .combineScript(combineScript)
                                .reduceScript(reduceScript)))
                .get();

        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocs));
@@ -437,24 +600,25 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testMapCombineReduceWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))
                                .execute().actionGet();
                                .mapScript(mapScript)
                                .combineScript(combineScript)
                                .reduceScript(reduceScript))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));

@@ -476,22 +640,25 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapReduceWithParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))
                                .execute().actionGet();
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .reduceScript(reduceScript))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
|
@ -517,17 +684,18 @@ public class ScriptedMetricTests extends ESIntegTestCase {
|
|||
params.put("_agg", new ArrayList<>());
|
||||
params.put("vars", varsMap);
|
||||
|
||||
Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
|
||||
Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
|
||||
|
||||
SearchResponse response = client()
|
||||
.prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
scriptedMetric("scripted")
|
||||
.params(params)
|
||||
.mapScript(new Script("_agg.add(vars.multiplier)"))
|
||||
.reduceScript(
|
||||
new Script(
|
||||
"newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation")))
|
||||
.execute().actionGet();
|
||||
.mapScript(mapScript)
|
||||
.reduceScript(reduceScript))
|
||||
.get();
|
||||
assertSearchResponse(response);
|
||||
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
|
||||
|
||||
|
@@ -549,27 +717,30 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapCombineReduceWithParamsAndReduceParams() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Map<String, Object> reduceParams = new HashMap<>();
        reduceParams.put("multiplier", 4);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("multiplied sum aggs of agg values as a new aggregation", ScriptType.INLINE,
                CustomScriptPlugin.NAME, reduceParams);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum * multiplier); return newaggregation",
                                                ScriptType.INLINE, null, reduceParams)))
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .combineScript(combineScript)
                                .reduceScript(reduceScript))
                .execute().actionGet();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
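Note: the reduceParams map passed to the reduce Script is how the reduce phase gets its own multiplier; it is bound into the mock script's vars, which is what vars.get("multiplier") reads in the "multiplied sum aggs" entry of pluginScripts(). A standalone sketch of that arithmetic (values chosen to mirror the test setup: init sets the per-doc multiplier to 3, reduce multiplies by 4):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ReduceParamsSketch {
        // Mirrors "multiplied sum aggs of agg values as a new aggregation":
        // per-shard sums arrive under "_aggs", the reduce-only param under "multiplier".
        static int reduce(List<Integer> shardSums, Map<String, Object> vars) {
            int sum = 0;
            for (Integer shardSum : shardSums) {
                sum += shardSum;
            }
            return sum * (Integer) vars.get("multiplier");
        }

        public static void main(String[] args) {
            Map<String, Object> vars = new HashMap<>();
            vars.put("multiplier", 4);
            // two shards, one doc each, each doc contributing 3 -> (3 + 3) * 4 = 24, i.e. numDocs * 12
            System.out.println(reduce(Arrays.asList(3, 3), vars));
        }
    }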
@@ -589,9 +760,10 @@ public class ScriptedMetricTests extends ESIntegTestCase {
        assertThat(((Number) object).longValue(), equalTo(numDocs * 12));
    }

    public void testInitMapCombineReduceWithParamsIndexed() {
    public void testInitMapCombineReduceWithParamsStored() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);
@@ -600,11 +772,13 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted").params(params)
                                .initScript(new Script("initScript_indexed", ScriptType.STORED, null, null))
                                .mapScript(new Script("mapScript_indexed", ScriptType.STORED, null, null))
                                .combineScript(new Script("combineScript_indexed", ScriptType.STORED, null, null))
                                .reduceScript(new Script("reduceScript_indexed", ScriptType.STORED, null, null))).execute().actionGet();
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("initScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null))
                                .mapScript(new Script("mapScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null))
                                .combineScript(new Script("combineScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null))
                                .reduceScript(new Script("reduceScript_stored", ScriptType.STORED, CustomScriptPlugin.NAME, null)))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -634,10 +808,13 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                .prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        scriptedMetric("scripted").params(params).initScript(new Script("init_script", ScriptType.FILE, null, null))
                                .mapScript(new Script("map_script", ScriptType.FILE, null, null))
                                .combineScript(new Script("combine_script", ScriptType.FILE, null, null))
                                .reduceScript(new Script("reduce_script", ScriptType.FILE, null, null))).execute().actionGet();
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("init_script", ScriptType.FILE, CustomScriptPlugin.NAME, null))
                                .mapScript(new Script("map_script", ScriptType.FILE, CustomScriptPlugin.NAME, null))
                                .combineScript(new Script("combine_script", ScriptType.FILE, CustomScriptPlugin.NAME, null))
                                .reduceScript(new Script("reduce_script", ScriptType.FILE, CustomScriptPlugin.NAME, null)))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -659,10 +836,16 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testInitMapCombineReduceWithParamsAsSubAgg() {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse response = client()
                .prepareSearch("idx")
                .setQuery(matchAllQuery()).setSize(1000)

@@ -673,15 +856,11 @@ public class ScriptedMetricTests extends ESIntegTestCase {
                .subAggregation(
                        scriptedMetric("scripted")
                                .params(params)
                                .initScript(new Script("vars.multiplier = 3"))
                                .mapScript(new Script("_agg.add(vars.multiplier)"))
                                .combineScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                .reduceScript(
                                        new Script(
                                                "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))))
                .execute().actionGet();
                                .initScript(initScript)
                                .mapScript(mapScript)
                                .combineScript(combineScript)
                                .reduceScript(reduceScript)))
                .get();
        assertSearchResponse(response);
        assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
        Aggregation aggregation = response.getAggregations().get("histo");
@@ -716,25 +895,27 @@ public class ScriptedMetricTests extends ESIntegTestCase {
    public void testEmptyAggregation() throws Exception {
        Map<String, Object> varsMap = new HashMap<>();
        varsMap.put("multiplier", 1);

        Map<String, Object> params = new HashMap<>();
        params.put("_agg", new ArrayList<>());
        params.put("vars", varsMap);

        Script initScript = new Script("vars.multiplier = 3", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script mapScript = new Script("_agg.add(vars.multiplier)", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script combineScript = new Script("sum agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
        Script reduceScript = new Script("sum aggs of agg values as a new aggregation", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

        SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
                .setQuery(matchAllQuery())
                .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
                        .subAggregation(
                                scriptedMetric("scripted")
                                        .params(params)
                                        .initScript(new Script("vars.multiplier = 3"))
                                        .mapScript(new Script("_agg.add(vars.multiplier)"))
                                        .combineScript(
                                                new Script(
                                                        "newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation"))
                                        .reduceScript(
                                                new Script(
                                                        "newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation"))))
                .execute().actionGet();
                                scriptedMetric("scripted")
                                        .params(params)
                                        .initScript(initScript)
                                        .mapScript(mapScript)
                                        .combineScript(combineScript)
                                        .reduceScript(reduceScript)))
                .get();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
        Histogram histo = searchResponse.getAggregations().get("histo");
@@ -101,7 +101,7 @@ public class TopHitsIT extends ESIntegTestCase {
    public static class CustomScriptPlugin extends MockScriptPlugin {
        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.emptyMap();
            return Collections.singletonMap("5", script -> "5");
        }
    }
@@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.MockEngineFactoryPlugin;

@@ -108,7 +107,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
        for (int i = 0; i < numDocs; i++) {
            try {
                IndexResponse indexResponse = client().prepareIndex("test", "type", "" + i).setTimeout(TimeValue.timeValueSeconds(1)).setSource("test", English.intToEnglish(i)).get();
                if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) {
                if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
                    numCreated++;
                    added[i] = true;
                }
@@ -137,7 +137,7 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase {
                added[i] = false;
                try {
                    IndexResponse indexResponse = client().prepareIndex("test", "type", Integer.toString(i)).setTimeout(TimeValue.timeValueSeconds(1)).setSource("test", English.intToEnglish(i)).get();
                    if (indexResponse.getOperation() == DocWriteResponse.Operation.CREATE) {
                    if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
                        numCreated++;
                        added[i] = true;
                    }
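Note: the two hunks above are the same mechanical migration, from the removed DocWriteResponse.Operation enum to DocWriteResponse.Result. A caller-side sketch of the new check (index name, type and id are placeholders):

    IndexResponse indexResponse = client().prepareIndex("test", "type", "1")
            .setSource("test", "one")
            .get();
    if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
        // the write created a new document rather than overwriting an existing one
    }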
@@ -17,28 +17,30 @@
 * under the License.
 */

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.fields;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;

@@ -49,11 +51,13 @@ import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;

import static java.util.Collections.singleton;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@@ -72,13 +76,82 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 *
 */
public class SearchFieldsTests extends ESIntegTestCase {
public class SearchFieldsIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
        return pluginList(CustomScriptPlugin.class);
    }

    public static class CustomScriptPlugin extends MockScriptPlugin {

        @Override
        @SuppressWarnings("unchecked")
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();

            scripts.put("doc['num1'].value", vars -> {
                Map<?, ?> doc = (Map) vars.get("doc");
                ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
                return num1.getValue();
            });

            scripts.put("doc['num1'].value * factor", vars -> {
                Map<?, ?> doc = (Map) vars.get("doc");
                ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
                Double factor = (Double) vars.get("factor");
                return num1.getValue() * factor;
            });

            scripts.put("doc['date'].date.millis", vars -> {
                Map<?, ?> doc = (Map) vars.get("doc");
                ScriptDocValues.Longs date = (ScriptDocValues.Longs) doc.get("date");
                return date.getDate().getMillis();
            });

            scripts.put("_fields['num1'].value", vars -> fieldsScript(vars, "num1"));
            scripts.put("_fields._uid.value", vars -> fieldsScript(vars, "_uid"));
            scripts.put("_fields._id.value", vars -> fieldsScript(vars, "_id"));
            scripts.put("_fields._type.value", vars -> fieldsScript(vars, "_type"));

            scripts.put("_source.obj1", vars -> sourceScript(vars, "obj1"));
            scripts.put("_source.obj1.test", vars -> sourceScript(vars, "obj1.test"));
            scripts.put("_source.obj2", vars -> sourceScript(vars, "obj2"));
            scripts.put("_source.obj2.arr2", vars -> sourceScript(vars, "obj2.arr2"));
            scripts.put("_source.arr3", vars -> sourceScript(vars, "arr3"));

            scripts.put("return null", vars -> null);

            scripts.put("doc['l'].values", vars -> docScript(vars, "l"));
            scripts.put("doc['ml'].values", vars -> docScript(vars, "ml"));
            scripts.put("doc['d'].values", vars -> docScript(vars, "d"));
            scripts.put("doc['md'].values", vars -> docScript(vars, "md"));
            scripts.put("doc['s'].values", vars -> docScript(vars, "s"));
            scripts.put("doc['ms'].values", vars -> docScript(vars, "ms"));

            return scripts;
        }

        @SuppressWarnings("unchecked")
        static Object fieldsScript(Map<String, Object> vars, String fieldName) {
            Map<?, ?> fields = (Map) vars.get("_fields");
            FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName);
            return fieldLookup.getValue();
        }

        @SuppressWarnings("unchecked")
        static Object sourceScript(Map<String, Object> vars, String path) {
            Map<String, Object> source = (Map) vars.get("_source");
            return XContentMapValues.extractValue(path, source);
        }

        @SuppressWarnings("unchecked")
        static Object docScript(Map<String, Object> vars, String fieldName) {
            Map<?, ?> doc = (Map) vars.get("doc");
            ScriptDocValues<?> values = (ScriptDocValues<?>) doc.get(fieldName);
            return values.getValues();
        }
    }
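Note: the three helpers above show the lookup structures mock scripts receive: "_fields" holds FieldLookup objects for stored fields, "_source" is the parsed source map, and "doc" exposes ScriptDocValues per field. A standalone sketch of the "_source" path walk, assuming only that dotted paths address nested maps the way XContentMapValues.extractValue does for these tests (the real method also handles lists):

    import java.util.HashMap;
    import java.util.Map;

    class SourcePathSketch {
        // Walks "a.b.c" through nested maps; returns null when a segment is missing.
        static Object extract(String path, Map<String, Object> source) {
            Object current = source;
            for (String segment : path.split("\\.")) {
                if (!(current instanceof Map)) {
                    return null;
                }
                current = ((Map<?, ?>) current).get(segment);
            }
            return current;
        }

        public static void main(String[] args) {
            Map<String, Object> obj1 = new HashMap<>();
            obj1.put("test", "something");
            Map<String, Object> source = new HashMap<>();
            source.put("obj1", obj1);
            System.out.println(extract("obj1.test", source)); // something
        }
    }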
    public void testStoredFields() throws Exception {

@@ -127,7 +200,12 @@ public class SearchFieldsTests extends ESIntegTestCase {
        assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3"));

        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2").execute().actionGet();
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addStoredField("*3")
                .addStoredField("field1")
                .addStoredField("field2")
                .get();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2));
@@ -156,7 +234,11 @@ public class SearchFieldsTests extends ESIntegTestCase {
        assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1"));
        assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3"));

        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").execute().actionGet();
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addStoredField("*")
                .addStoredField("_source")
                .get();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).source(), notNullValue());
@@ -175,25 +257,37 @@ public class SearchFieldsTests extends ESIntegTestCase {
        client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();

        client().prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject())
                .setSource(jsonBuilder().startObject()
                        .field("test", "value beck")
                        .field("num1", 1.0f)
                        .field("date", "1970-01-01T00:00:00")
                        .endObject())
                .execute().actionGet();
        client().admin().indices().prepareFlush().execute().actionGet();
        client().prepareIndex("test", "type1", "2")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).field("date", "1970-01-01T00:00:25").endObject())
                .execute().actionGet();
                .setSource(jsonBuilder().startObject()
                        .field("test", "value beck")
                        .field("num1", 2.0f)
                        .field("date", "1970-01-01T00:00:25")
                        .endObject())
                .get();
        client().admin().indices().prepareFlush().execute().actionGet();
        client().prepareIndex("test", "type1", "3")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).field("date", "1970-01-01T00:02:00").endObject())
                .execute().actionGet();
                .setSource(jsonBuilder().startObject()
                        .field("test", "value beck")
                        .field("num1", 3.0f)
                        .field("date", "1970-01-01T00:02:00")
                        .endObject())
                .get();
        client().admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("running doc['num1'].value");
        SearchResponse response = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", new Script("doc['num1'].value"))
                .addScriptField("sNum1_field", new Script("_fields['num1'].value"))
                .addScriptField("date1", new Script("doc['date'].date.millis"))
                .addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("sNum1_field", new Script("_fields['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("date1", new Script("doc['date'].date.millis", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .execute().actionGet();

        assertNoFailures(response);
@@ -204,48 +298,48 @@ public class SearchFieldsTests extends ESIntegTestCase {
        Set<String> fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
        assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
        assertThat((Double) response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0));
        assertThat((Long) response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L));
        assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
        assertThat(response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0));
        assertThat(response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L));
        assertThat(response.getHits().getAt(1).id(), equalTo("2"));
        fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
        assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat((Double) response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0));
        assertThat((Long) response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000L));
        assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0));
        assertThat(response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000L));
        assertThat(response.getHits().getAt(2).id(), equalTo("3"));
        fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1")));
        assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
        assertThat((Double) response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0));
        assertThat((Long) response.getHits().getAt(2).fields().get("date1").values().get(0), equalTo(120000L));
        assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
        assertThat(response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0));
        assertThat(response.getHits().getAt(2).fields().get("date1").values().get(0), equalTo(120000L));

        logger.info("running doc['num1'].value * factor");
        Map<String, Object> params = MapBuilder.<String, Object>newMapBuilder().put("factor", 2.0).map();
        response = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, null, params))
                .execute().actionGet();
                .addScriptField("sNum1", new Script("doc['num1'].value * factor", ScriptType.INLINE, CustomScriptPlugin.NAME, params))
                .get();

        assertThat(response.getHits().totalHits(), equalTo(3L));
        assertThat(response.getHits().getAt(0).id(), equalTo("1"));
        fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(singleton("sNum1")));
        assertThat((Double) response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.getHits().getAt(1).id(), equalTo("2"));
        fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(singleton("sNum1")));
        assertThat((Double) response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0));
        assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0));
        assertThat(response.getHits().getAt(2).id(), equalTo("3"));
        fields = new HashSet<>(response.getHits().getAt(0).fields().keySet());
        fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
        assertThat(fields, equalTo(singleton("sNum1")));
        assertThat((Double) response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0));
        assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0));
    }
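Note: in the factor test above, the params map passed into the Script constructor surfaces directly in the mock script's vars, which is why the "doc['num1'].value * factor" entry of pluginScripts() can read "factor" next to "doc". A compact sketch of that merge; the exact vars layout is an assumption for illustration:

    import java.util.HashMap;
    import java.util.Map;

    class ScriptVarsSketch {
        // The engine seeds vars with lookup structures, then copies user params on top.
        static Map<String, Object> buildVars(Map<String, Object> lookups, Map<String, Object> params) {
            Map<String, Object> vars = new HashMap<>(lookups); // e.g. "doc", "_source", "_fields"
            if (params != null) {
                vars.putAll(params); // e.g. "factor" -> 2.0
            }
            return vars;
        }
    }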
    public void testUidBasedScriptFields() throws Exception {

@@ -260,8 +354,11 @@ public class SearchFieldsTests extends ESIntegTestCase {
        indexRandom(true, indexRequestBuilders);

        SearchResponse response = client().prepareSearch()
                .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
                .addScriptField("uid", new Script("_fields._uid.value")).get();
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .setSize(numDocs)
                .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertNoFailures(response);
@@ -271,12 +368,15 @@ public class SearchFieldsTests extends ESIntegTestCase {
            Set<String> fields = new HashSet<>(response.getHits().getAt(i).fields().keySet());
            fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
            assertThat(fields, equalTo(singleton("uid")));
            assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
            assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
        }

        response = client().prepareSearch()
                .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
                .addScriptField("id", new Script("_fields._id.value")).get();
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .setSize(numDocs)
                .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertNoFailures(response);
@@ -286,12 +386,15 @@ public class SearchFieldsTests extends ESIntegTestCase {
            Set<String> fields = new HashSet<>(response.getHits().getAt(i).fields().keySet());
            fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
            assertThat(fields, equalTo(singleton("id")));
            assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
            assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
        }

        response = client().prepareSearch()
                .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
                .addScriptField("type", new Script("_fields._type.value")).get();
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .setSize(numDocs)
                .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertNoFailures(response);
@@ -301,13 +404,17 @@ public class SearchFieldsTests extends ESIntegTestCase {
            Set<String> fields = new HashSet<>(response.getHits().getAt(i).fields().keySet());
            fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
            assertThat(fields, equalTo(singleton("type")));
            assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
            assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
        }

        response = client().prepareSearch()
                .setQuery(matchAllQuery()).addSort("num1", SortOrder.ASC).setSize(numDocs)
                .addScriptField("id", new Script("_fields._id.value")).addScriptField("uid", new Script("_fields._uid.value"))
                .addScriptField("type", new Script("_fields._type.value")).get();
                .setQuery(matchAllQuery())
                .addSort("num1", SortOrder.ASC)
                .setSize(numDocs)
                .addScriptField("id", new Script("_fields._id.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("uid", new Script("_fields._uid.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("type", new Script("_fields._type.value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertNoFailures(response);
@@ -317,9 +424,9 @@ public class SearchFieldsTests extends ESIntegTestCase {
            Set<String> fields = new HashSet<>(response.getHits().getAt(i).fields().keySet());
            fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates
            assertThat(fields, equalTo(newHashSet("uid", "type", "id")));
            assertThat((String)response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
            assertThat((String)response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
            assertThat((String)response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
            assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i)));
            assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1"));
            assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i)));
        }
    }
@@ -335,10 +442,14 @@ public class SearchFieldsTests extends ESIntegTestCase {
                .execute().actionGet();
        client().admin().indices().refresh(refreshRequest()).actionGet();

        SearchResponse response = client().prepareSearch().setQuery(matchAllQuery()).addScriptField("s_obj1", new Script("_source.obj1"))
                .addScriptField("s_obj1_test", new Script("_source.obj1.test")).addScriptField("s_obj2", new Script("_source.obj2"))
                .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2")).addScriptField("s_arr3", new Script("_source.arr3"))
                .execute().actionGet();
        SearchResponse response = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("s_obj1", new Script("_source.obj1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("s_obj1_test", new Script("_source.obj1.test", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("s_obj2", new Script("_source.obj2", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .addScriptField("s_arr3", new Script("_source.arr3", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertThat("Failures " + Arrays.toString(response.getShardFailures()), response.getShardFailures().length, equalTo(0));
@@ -365,12 +476,13 @@ public class SearchFieldsTests extends ESIntegTestCase {

    public void testScriptFieldsForNullReturn() throws Exception {
        client().prepareIndex("test", "type1", "1")
                .setSource("foo", "bar")
                .setRefreshPolicy("true").get();

        SearchResponse response = client().prepareSearch().setQuery(matchAllQuery())
                .addScriptField("test_script_1", new Script("return null"))
                .get();
        SearchResponse response = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("test_script_1", new Script("return null", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                .get();

        assertNoFailures(response);
@@ -401,17 +513,53 @@ public class SearchFieldsTests extends ESIntegTestCase {
    public void testStoredFieldsWithoutSource() throws Exception {
        createIndex("test");

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties")
                .startObject("byte_field").field("type", "byte").field("store", true).endObject()
                .startObject("short_field").field("type", "short").field("store", true).endObject()
                .startObject("integer_field").field("type", "integer").field("store", true).endObject()
                .startObject("long_field").field("type", "long").field("store", true).endObject()
                .startObject("float_field").field("type", "float").field("store", true).endObject()
                .startObject("double_field").field("type", "double").field("store", true).endObject()
                .startObject("date_field").field("type", "date").field("store", true).endObject()
                .startObject("boolean_field").field("type", "boolean").field("store", true).endObject()
                .startObject("binary_field").field("type", "binary").field("store", true).endObject()
                .endObject().endObject().endObject().string();
        String mapping = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("type1")
                        .startObject("_source")
                            .field("enabled", false)
                        .endObject()
                        .startObject("properties")
                            .startObject("byte_field")
                                .field("type", "byte")
                                .field("store", true)
                            .endObject()
                            .startObject("short_field")
                                .field("type", "short")
                                .field("store", true)
                            .endObject()
                            .startObject("integer_field")
                                .field("type", "integer")
                                .field("store", true)
                            .endObject()
                            .startObject("long_field")
                                .field("type", "long")
                                .field("store", true)
                            .endObject()
                            .startObject("float_field")
                                .field("type", "float")
                                .field("store", true)
                            .endObject()
                            .startObject("double_field")
                                .field("type", "double")
                                .field("store", true)
                            .endObject()
                            .startObject("date_field")
                                .field("type", "date")
                                .field("store", true)
                            .endObject()
                            .startObject("boolean_field")
                                .field("type", "boolean")
                                .field("store", true)
                            .endObject()
                            .startObject("binary_field")
                                .field("type", "binary")
                                .field("store", true)
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .string();

        client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();
@@ -449,17 +597,17 @@ public class SearchFieldsTests extends ESIntegTestCase {
                "float_field", "double_field", "date_field", "boolean_field", "binary_field")));

        assertThat(searchResponse.getHits().getAt(0).fields().get("byte_field").value().toString(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(0).fields().get("short_field").value().toString(), equalTo("2"));
        assertThat(searchResponse.getHits().getAt(0).fields().get("integer_field").value(), equalTo((Object) 3));
        assertThat(searchResponse.getHits().getAt(0).fields().get("long_field").value(), equalTo((Object) 4L));
        assertThat(searchResponse.getHits().getAt(0).fields().get("float_field").value(), equalTo((Object) 5.0f));
        assertThat(searchResponse.getHits().getAt(0).fields().get("double_field").value(), equalTo((Object) 6.0d));
        SearchHit searchHit = searchResponse.getHits().getAt(0);
        assertThat(searchHit.fields().get("byte_field").value().toString(), equalTo("1"));
        assertThat(searchHit.fields().get("short_field").value().toString(), equalTo("2"));
        assertThat(searchHit.fields().get("integer_field").value(), equalTo((Object) 3));
        assertThat(searchHit.fields().get("long_field").value(), equalTo((Object) 4L));
        assertThat(searchHit.fields().get("float_field").value(), equalTo((Object) 5.0f));
        assertThat(searchHit.fields().get("double_field").value(), equalTo((Object) 6.0d));
        String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
        assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) dateTime));
        assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE));
        assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8"))));

        assertThat(searchHit.fields().get("date_field").value(), equalTo((Object) dateTime));
        assertThat(searchHit.fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE));
        assertThat(searchHit.fields().get("binary_field").value(), equalTo(new BytesArray("testing text".getBytes("UTF8"))));
    }

    public void testSearchFieldsMetaData() throws Exception {
@@ -575,25 +723,57 @@ public class SearchFieldsTests extends ESIntegTestCase {
                new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).fieldDataField("test_field")).get();
        assertHitCount(searchResponse, 1);
        Map<String, SearchHitField> fields = searchResponse.getHits().getHits()[0].getFields();
        assertThat((String)fields.get("test_field").value(), equalTo("foobar"));
        assertThat(fields.get("test_field").value(), equalTo("foobar"));
    }

    public void testFieldsPulledFromFieldData() throws Exception {
        createIndex("test");

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties")
                .startObject("text_field").field("type", "text").field("fielddata", true).endObject()
                .startObject("keyword_field").field("type", "keyword").endObject()
                .startObject("byte_field").field("type", "byte").endObject()
                .startObject("short_field").field("type", "short").endObject()
                .startObject("integer_field").field("type", "integer").endObject()
                .startObject("long_field").field("type", "long").endObject()
                .startObject("float_field").field("type", "float").endObject()
                .startObject("double_field").field("type", "double").endObject()
                .startObject("date_field").field("type", "date").endObject()
                .startObject("boolean_field").field("type", "boolean").endObject()
                .startObject("binary_field").field("type", "binary").endObject()
                .endObject().endObject().endObject().string();
        String mapping = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("type1")
                        .startObject("_source")
                            .field("enabled", false)
                        .endObject()
                        .startObject("properties")
                            .startObject("text_field")
                                .field("type", "text")
                                .field("fielddata", true)
                            .endObject()
                            .startObject("keyword_field")
                                .field("type", "keyword")
                            .endObject()
                            .startObject("byte_field")
                                .field("type", "byte")
                            .endObject()
                            .startObject("short_field")
                                .field("type", "short")
                            .endObject()
                            .startObject("integer_field")
                                .field("type", "integer")
                            .endObject()
                            .startObject("long_field")
                                .field("type", "long")
                            .endObject()
                            .startObject("float_field")
                                .field("type", "float")
                            .endObject()
                            .startObject("double_field")
                                .field("type", "double")
                            .endObject()
                            .startObject("date_field")
                                .field("type", "date")
                            .endObject()
                            .startObject("boolean_field")
                                .field("type", "boolean")
                            .endObject()
                            .startObject("binary_field")
                                .field("type", "binary")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .string();

        client().admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();

@@ -667,7 +847,7 @@ public class SearchFieldsTests extends ESIntegTestCase {
        ensureSearchable();
        SearchRequestBuilder req = client().prepareSearch("index");
        for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) {
            req.addScriptField(field, new Script("doc['" + field + "'].values"));
            req.addScriptField(field, new Script("doc['" + field + "'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null));
        }
        SearchResponse resp = req.get();
        assertSearchResponse(resp);

@@ -690,11 +870,11 @@ public class SearchFieldsTests extends ESIntegTestCase {

        indexRandom(true,
                client().prepareIndex("test", "my-type1", "1")
                        .setRouting("1")
                        .setTimestamp("205097")
                        .setTTL(10000000000000L)
                        .setParent("parent_1")
                        .setSource(jsonBuilder().startObject().field("field1", "value").endObject()));
|
||||
.setRouting("1")
|
||||
.setTimestamp("205097")
|
||||
.setTTL(10000000000000L)
|
||||
.setParent("parent_1")
|
||||
.setSource(jsonBuilder().startObject().field("field1", "value").endObject()));
|
||||
|
||||
SearchResponse response = client().prepareSearch("test").addStoredField("field1").get();
|
||||
assertSearchResponse(response);
|
|
@ -16,26 +16,27 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
package org.elasticsearch.search.functionscore;
|
||||
|
||||
import org.apache.lucene.util.ArrayUtil;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.script.MockScriptPlugin;
|
||||
import org.elasticsearch.script.ScoreAccessor;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
|
||||
|
@ -44,6 +45,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
|||
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
|
||||
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
|
||||
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
|
||||
import static org.elasticsearch.script.MockScriptPlugin.NAME;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
|
||||
import static org.hamcrest.Matchers.allOf;
|
||||
|
@ -54,11 +56,41 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
|||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class RandomScoreFunctionTests extends ESIntegTestCase {
|
||||
public class RandomScoreFunctionIT extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singleton(GroovyPlugin.class);
|
||||
return pluginList(CustomScriptPlugin.class);
|
||||
}
|
||||
|
||||
public static class CustomScriptPlugin extends MockScriptPlugin {
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
|
||||
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
|
||||
|
||||
scripts.put("log(doc['index'].value + (factor * _score))",
|
||||
vars -> scoringScript(vars, ScoreAccessor::doubleValue));
|
||||
scripts.put("log(doc['index'].value + (factor * _score.intValue()))",
|
||||
vars -> scoringScript(vars, ScoreAccessor::intValue));
|
||||
scripts.put("log(doc['index'].value + (factor * _score.longValue()))",
|
||||
vars -> scoringScript(vars, ScoreAccessor::longValue));
|
||||
scripts.put("log(doc['index'].value + (factor * _score.floatValue()))",
|
||||
vars -> scoringScript(vars, ScoreAccessor::floatValue));
|
||||
scripts.put("log(doc['index'].value + (factor * _score.doubleValue()))",
|
||||
vars -> scoringScript(vars, ScoreAccessor::doubleValue));
|
||||
return scripts;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static Double scoringScript(Map<String, Object> vars, Function<ScoreAccessor, Number> scoring) {
|
||||
Map<?, ?> doc = (Map) vars.get("doc");
|
||||
Double index = ((Number) ((ScriptDocValues<?>) doc.get("index")).getValues().get(0)).doubleValue();
|
||||
Double score = scoring.apply((ScoreAccessor) vars.get("_score")).doubleValue();
|
||||
Integer factor = (Integer) vars.get("factor");
|
||||
return Math.log(index + (factor * score));
|
||||
}
|
||||
}
|
||||
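// A minimal sketch, assuming only what the CustomScriptPlugin above already shows: MockScriptPlugin
// matches a mock script by its exact inline source string and hands it the script's variables
// ("doc", "_score", and any params) when it runs. A test therefore invokes one of these mocks like
// this (hypothetical example, not part of the commit):
//
//     Map<String, Object> params = new HashMap<>();
//     params.put("factor", 2);
//     // the source string must match a pluginScripts() key character for character
//     Script script = new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, NAME, params);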
public void testConsistentHitsWithSameSeed() throws Exception {

@ -86,17 +118,15 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
.setPreference(preference)
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
.execute().actionGet();
assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, CoreMatchers.equalTo(0));
assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()),
searchResponse.getShardFailures().length, CoreMatchers.equalTo(0));
final int hitCount = searchResponse.getHits().getHits().length;
final SearchHit[] currentHits = searchResponse.getHits().getHits();
ArrayUtil.timSort(currentHits, new Comparator<SearchHit>() {
@Override
public int compare(SearchHit o1, SearchHit o2) {
// for tie-breaking we have to resort here since if the score is
// identical we rely on collection order which might change.
int cmp = Float.compare(o1.getScore(), o2.getScore());
return cmp == 0 ? o1.getId().compareTo(o2.getId()) : cmp;
}
ArrayUtil.timSort(currentHits, (o1, o2) -> {
// for tie-breaking we have to resort here since if the score is
// identical we rely on collection order which might change.
int cmp = Float.compare(o1.getScore(), o2.getScore());
return cmp == 0 ? o1.getId().compareTo(o2.getId()) : cmp;
});
if (i == 0) {
assertThat(hits, nullValue());

@ -128,71 +158,92 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {

int docCount = randomIntBetween(100, 200);
for (int i = 0; i < docCount; i++) {
client().prepareIndex("test", "type", "" + i).setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1) // we add 1 to the index field to make sure that the scripts below never compute log(0)
client().prepareIndex("test", "type", "" + i)
// we add 1 to the index field to make sure that the scripts below never compute log(0)
.setSource("body", randomFrom(Arrays.asList("foo", "bar", "baz")), "index", i + 1)
.get();
}
refresh();

Map<String, Object> params = new HashMap<>();
params.put("factor", randomIntBetween(2, 4));

// Test for accessing _score
Script script = new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, NAME, params);
SearchResponse resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, null, params)))
})).get();
functionScoreQuery(matchQuery("body", "foo"),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script))
}
))
.get();
assertNoFailures(resp);
SearchHit firstHit = resp.getHits().getAt(0);
assertThat(firstHit.getScore(), greaterThan(1f));

// Test for accessing _score.intValue()
script = new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, NAME, params);
resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, null, params)))
})).get();
functionScoreQuery(matchQuery("body", "foo"),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script))
}
))
.get();
assertNoFailures(resp);
firstHit = resp.getHits().getAt(0);
assertThat(firstHit.getScore(), greaterThan(1f));

// Test for accessing _score.longValue()
script = new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, NAME, params);
resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, null, params)))
})).get();
functionScoreQuery(matchQuery("body", "foo"),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script))
}
))
.get();
assertNoFailures(resp);
firstHit = resp.getHits().getAt(0);
assertThat(firstHit.getScore(), greaterThan(1f));

// Test for accessing _score.floatValue()
script = new Script("log(doc['index'].value + (factor * _score.floatValue()))", ScriptType.INLINE, NAME, params);
resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.floatValue()))",
ScriptType.INLINE, null, params)))
})).get();
functionScoreQuery(matchQuery("body", "foo"),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script))
}
))
.get();
assertNoFailures(resp);
firstHit = resp.getHits().getAt(0);
assertThat(firstHit.getScore(), greaterThan(1f));

// Test for accessing _score.doubleValue()
script = new Script("log(doc['index'].value + (factor * _score.doubleValue()))", ScriptType.INLINE, NAME, params);
resp = client()
.prepareSearch("test")
.setQuery(
functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
functionScoreQuery(matchQuery("body", "foo"),
new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.doubleValue()))",
ScriptType.INLINE, null, params)))
})).get();
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script))
}
))
.get();
assertNoFailures(resp);
firstHit = resp.getHits().getAt(0);
assertThat(firstHit.getScore(), greaterThan(1f));

@ -208,9 +259,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
int seed = 12345678;

SearchResponse resp = client().prepareSearch("test")
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
.setExplain(true)
.get();
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
.setExplain(true)
.get();
assertNoFailures(resp);
assertEquals(1, resp.getHits().totalHits());
SearchHit firstHit = resp.getHits().getAt(0);

@ -222,8 +273,8 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
ensureGreen();

SearchResponse resp = client().prepareSearch("test")
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(1234)))
.get();
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(1234)))
.get();
assertNoFailures(resp);
assertEquals(0, resp.getHits().totalHits());
}

@ -243,9 +294,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
for (int i = 0; i < iters; ++i) {
int seed = randomInt();
SearchResponse searchResponse = client().prepareSearch()
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
.setSize(docCount)
.execute().actionGet();
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(seed)))
.setSize(docCount)
.execute().actionGet();

assertNoFailures(searchResponse);
for (SearchHit hit : searchResponse.getHits().getHits()) {

@ -264,19 +315,19 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
flushAndRefresh();

assertNoFailures(client().prepareSearch()
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomInt())))
.execute().actionGet());
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomInt())))
.execute().actionGet());

assertNoFailures(client().prepareSearch()
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomLong())))
.execute().actionGet());
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomLong())))
.execute().actionGet());

assertNoFailures(client().prepareSearch()
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomRealisticUnicodeOfLengthBetween(10, 20))))
.execute().actionGet());
.setSize(docCount) // get all docs otherwise we are prone to tie-breaking
.setQuery(functionScoreQuery(matchAllQuery(), randomFunction(randomRealisticUnicodeOfLengthBetween(10, 20))))
.execute().actionGet());
}

public void checkDistribution() throws Exception {

@ -344,5 +395,4 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {

logger.info("mean: {}", sum / (double) count);
}

}

@ -17,7 +17,7 @@
* under the License.
*/

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.geo;

import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;

@ -27,32 +27,71 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.script.ScriptService.ScriptType;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.closeTo;

/**
*/
public class GeoDistanceTests extends ESIntegTestCase {
public class GeoDistanceIT extends ESIntegTestCase {

private static final double source_lat = 32.798;
private static final double source_long = -117.151;
private static final double target_lat = 32.81;
private static final double target_long = -117.21;

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class);
return pluginList(CustomScriptPlugin.class, InternalSettingsPlugin.class);
}

public static class CustomScriptPlugin extends MockScriptPlugin {

@Override
@SuppressWarnings("unchecked")
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();

scripts.put("arcDistance", vars -> distanceScript(vars,
location -> location.arcDistance(target_lat, target_long)));
scripts.put("distance", vars -> distanceScript(vars,
location -> location.distance(target_lat, target_long)));
scripts.put("arcDistanceInKm", vars -> distanceScript(vars,
location -> location.arcDistanceInKm(target_lat, target_long)));
scripts.put("distanceInKm", vars -> distanceScript(vars,
location -> location.distanceInKm(target_lat, target_long)));
scripts.put("arcDistanceInKm(lat, lon + 360)", vars -> distanceScript(vars,
location -> location.arcDistanceInKm(target_lat, target_long + 360)));
scripts.put("arcDistanceInKm(lat + 360, lon)", vars -> distanceScript(vars,
location -> location.arcDistanceInKm(target_lat + 360, target_long)));
scripts.put("arcDistanceInMiles", vars -> distanceScript(vars,
location -> location.arcDistanceInMiles(target_lat, target_long)));
scripts.put("distanceInMiles", vars -> distanceScript(vars,
location -> location.distanceInMiles(target_lat, target_long)));

return scripts;
}

@SuppressWarnings("unchecked")
static Double distanceScript(Map<String, Object> vars, Function<ScriptDocValues.GeoPoints, Double> distance) {
Map<?, ?> doc = (Map) vars.get("doc");
return distance.apply((ScriptDocValues.GeoPoints) doc.get("location"));
}
}
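// Note that the keys registered above are opaque identifiers, not parsed expressions: a key such as
// "arcDistanceInKm(lat, lon + 360)" is only ever matched verbatim against a Script's source string.
// A sketch of the lookup the test below performs (assuming the registration above):
//
//     new Script("arcDistanceInKm(lat, lon + 360)", ScriptType.INLINE, CustomScriptPlugin.NAME, null)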
public void testDistanceScript() throws Exception {
double source_lat = 32.798;
double source_long = -117.151;
double target_lat = 32.81;
double target_long = -117.21;

Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();

@ -65,69 +104,78 @@ public class GeoDistanceTests extends ESIntegTestCase {
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("name", "TestPosition")
.startObject("location").field("lat", source_lat).field("lon", source_long).endObject()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject()
.field("name", "TestPosition")
.startObject("location")
.field("lat", source_lat)
.field("lon", source_long)
.endObject()
.endObject())
.get();

refresh();

// Test doc['location'].arcDistance(lat, lon)
SearchResponse searchResponse1 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistance(" + target_lat + "," + target_long + ")")).execute()
.actionGet();
.addScriptField("distance", new Script("arcDistance", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance1,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d));

// Test doc['location'].distance(lat, lon)
SearchResponse searchResponse2 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute()
.actionGet();
.addScriptField("distance", new Script("distance", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance2,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d));

// Test doc['location'].arcDistanceInKm(lat, lon)
SearchResponse searchResponse3 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
.addScriptField("distance", new Script("arcDistanceInKm", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance3,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

// Test doc['location'].distanceInKm(lat, lon)
SearchResponse searchResponse4 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute()
.actionGet();
.addScriptField("distance", new Script("distanceInKm", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance4,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse5 = client()
.prepareSearch()
.addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")"))
.execute().actionGet();
// Test doc['location'].arcDistanceInKm(lat, lon + 360)
SearchResponse searchResponse5 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("arcDistanceInKm(lat, lon + 360)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance5,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse6 = client()
.prepareSearch()
.addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")"))
.execute().actionGet();
// Test doc['location'].arcDistanceInKm(lat + 360, lon)
SearchResponse searchResponse6 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("arcDistanceInKm(lat + 360, lon)", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance6,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

// Test doc['location'].arcDistanceInMiles(lat, lon)
SearchResponse searchResponse7 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
.addScriptField("distance", new Script("arcDistanceInMiles", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance7,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d));

// Test doc['location'].distanceInMiles(lat, lon)
SearchResponse searchResponse8 = client().prepareSearch().addStoredField("_source")
.addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
.addScriptField("distance", new Script("distanceInMiles", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();
Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance8,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d));

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.geo;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;

@ -26,25 +26,13 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collection;
import java.util.Collections;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

/**
*/
public class GeoShapeIntegrationTests extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
}
public class GeoShapeIntegrationIT extends ESIntegTestCase {

/**
* Test that orientation parameter correctly persists across cluster restart

@ -83,7 +83,7 @@ public class InnerHitsIT extends ESIntegTestCase {
public static class CustomScriptPlugin extends MockScriptPlugin {
@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.emptyMap();
return Collections.singletonMap("5", script -> "5");
}
}
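// The "5" mock above stands in for an inline Groovy constant: any script whose source is exactly "5"
// now returns the string "5", which is all the inner-hits script-field assertions appear to need.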
@ -149,7 +149,7 @@ public class SimpleNestedIT extends ESIntegTestCase {

// check delete, so all is gone...
DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").execute().actionGet();
assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
// flush, so we fetch it from the index (and see that we filter nested docs)
flush();

@ -17,15 +17,16 @@
* under the License.
*/

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.scriptfilter;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

@ -34,19 +35,47 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.hamcrest.Matchers.equalTo;

/**
*
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class ScriptQuerySearchTests extends ESIntegTestCase {
public class ScriptQuerySearchIT extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
return Collections.singleton(CustomScriptPlugin.class);
}

public static class CustomScriptPlugin extends MockScriptPlugin {

@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();

scripts.put("doc['num1'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
return doc.get("num1");
});

scripts.put("doc['num1'].value > 1", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
return num1.getValue() > 1;
});

scripts.put("doc['num1'].value > param1", vars -> {
Integer param1 = (Integer) vars.get("param1");

Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues.Doubles num1 = (ScriptDocValues.Doubles) doc.get("num1");
return num1.getValue() > param1;
});

return scripts;
}
}
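// In this mock, filter scripts ("doc['num1'].value > 1") return a Boolean while script fields
// ("doc['num1'].value") return the raw doc value, mirroring how the Groovy originals behaved.
// A sketch of the query-side lookup the tests below rely on (hypothetical, for illustration):
//
//     scriptQuery(new Script("doc['num1'].value > 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null))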
@Override

@ -62,21 +91,23 @@ public class ScriptQuerySearchTests extends ESIntegTestCase {
createIndex("test");
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())
.execute().actionGet();
.get();
flush();
client().prepareIndex("test", "type1", "2")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject())
.execute().actionGet();
.get();
flush();
client().prepareIndex("test", "type1", "3")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject())
.execute().actionGet();
.get();
refresh();

logger.info("running doc['num1'].value > 1");
SearchResponse response = client().prepareSearch()
.setQuery(scriptQuery(new Script("doc['num1'].value > 1"))).addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet();
.setQuery(scriptQuery(new Script("doc['num1'].value > 1", ScriptType.INLINE, CustomScriptPlugin.NAME, null)))
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();

assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().getAt(0).id(), equalTo("2"));

@ -90,8 +121,10 @@ public class ScriptQuerySearchTests extends ESIntegTestCase {
logger.info("running doc['num1'].value > param1");
response = client()
.prepareSearch()
.setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, null, params)))
.addSort("num1", SortOrder.ASC).addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet();
.setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params)))
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();

assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).id(), equalTo("3"));

@ -102,9 +135,10 @@ public class ScriptQuerySearchTests extends ESIntegTestCase {
logger.info("running doc['num1'].value > param1");
response = client()
.prepareSearch()
.setQuery(
scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, null, params)))
.addSort("num1", SortOrder.ASC).addScriptField("sNum1", new Script("doc['num1'].value")).execute().actionGet();
.setQuery(scriptQuery(new Script("doc['num1'].value > param1", ScriptType.INLINE, CustomScriptPlugin.NAME, params)))
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", new Script("doc['num1'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.get();

assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));

@ -0,0 +1,476 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.sort;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.script.ScriptService.ScriptType;
import static org.elasticsearch.search.sort.SortBuilders.scriptSort;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class SimpleSortIT extends ESIntegTestCase {

private static final String DOUBLE_APOSTROPHE = "\u0027\u0027";

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(CustomScriptPlugin.class, InternalSettingsPlugin.class);
}

public static class CustomScriptPlugin extends MockScriptPlugin {

@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();

scripts.put("doc['str_value'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
return ((ScriptDocValues.Strings) doc.get("str_value")).getValue();
});

scripts.put("doc['id'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
return ((ScriptDocValues.Strings) doc.get("id")).getValue();
});

scripts.put("doc['id'].values[0]", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
return ((ScriptDocValues.Strings) doc.get("id")).getValues().get(0);
});

scripts.put("get min long", vars -> getMinValueScript(vars, Long.MAX_VALUE, "lvalue", l -> (Long) l));
scripts.put("get min double", vars -> getMinValueScript(vars, Double.MAX_VALUE, "dvalue", d -> (Double) d));
scripts.put("get min string", vars -> getMinValueScript(vars, Integer.MAX_VALUE, "svalue", s -> Integer.parseInt((String) s)));
scripts.put("get min geopoint lon", vars -> getMinValueScript(vars, Double.MAX_VALUE, "gvalue", g -> ((GeoPoint) g).getLon()));

scripts.put(DOUBLE_APOSTROPHE, vars -> DOUBLE_APOSTROPHE);

return scripts;
}

/**
* Return the minimal value from a set of values.
*/
@SuppressWarnings("unchecked")
static <T extends Comparable<T>> T getMinValueScript(Map<String, Object> vars, T initialValue, String fieldName,
Function<Object, T> converter) {
T retval = initialValue;
Map<?, ?> doc = (Map) vars.get("doc");
ScriptDocValues<?> values = (ScriptDocValues<?>) doc.get(fieldName);
for (Object v : values.getValues()) {
T value = converter.apply(v);
retval = (value.compareTo(retval) < 0) ? value : retval;
}
return retval;
}
}
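// Worked example of getMinValueScript above: for a document with lvalue = [3, 4, 5], the
// "get min long" mock starts from Long.MAX_VALUE, folds each value through the converter,
// and returns 3L. An equivalent standalone sketch of that fold (assumed semantics, not a
// library call):
//
//     List<Long> values = Arrays.asList(3L, 4L, 5L);
//     long min = Long.MAX_VALUE;
//     for (long v : values) {
//         min = Math.min(min, v);
//     }
//     // min == 3L, the value the "get min long" script field reports for the document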
public void testSimpleSorts() throws Exception {
Random random = random();
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder()
.startObject()
.startObject("type1")
.startObject("properties")
.startObject("str_value")
.field("type", "keyword")
.endObject()
.startObject("boolean_value")
.field("type", "boolean")
.endObject()
.startObject("byte_value")
.field("type", "byte")
.endObject()
.startObject("short_value")
.field("type", "short")
.endObject()
.startObject("integer_value")
.field("type", "integer")
.endObject()
.startObject("long_value")
.field("type", "long")
.endObject()
.startObject("float_value")
.field("type", "float")
.endObject()
.startObject("double_value")
.field("type", "double")
.endObject()
.endObject()
.endObject()
.endObject()));
ensureGreen();
List<IndexRequestBuilder> builders = new ArrayList<>();
for (int i = 0; i < 10; i++) {
builders.add(client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder()
.startObject()
.field("str_value", new String(new char[]{(char) (97 + i), (char) (97 + i)}))
.field("boolean_value", true)
.field("byte_value", i)
.field("short_value", i)
.field("integer_value", i)
.field("long_value", i)
.field("float_value", 0.1 * i)
.field("double_value", 0.1 * i)
.endObject()
));
}
Collections.shuffle(builders, random);
for (IndexRequestBuilder builder : builders) {
builder.execute().actionGet();
if (random.nextBoolean()) {
if (random.nextInt(5) != 0) {
refresh();
} else {
client().admin().indices().prepareFlush().get();
}
}
}
refresh();

// STRING script
int size = 1 + random.nextInt(10);

Script script = new Script("doc['str_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

SearchResponse searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.setSize(size)
.addSort(new ScriptSortBuilder(script, ScriptSortType.STRING))
.get();

assertHitCount(searchResponse, 10);
assertThat(searchResponse.getHits().hits().length, equalTo(size));
for (int i = 0; i < size; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat(searchHit.id(), equalTo(Integer.toString(i)));

String expected = new String(new char[]{(char) (97 + i), (char) (97 + i)});
assertThat(searchHit.sortValues()[0].toString(), equalTo(expected));
}

size = 1 + random.nextInt(10);
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.setSize(size)
.addSort("str_value", SortOrder.DESC)
.get();

assertHitCount(searchResponse, 10);
assertThat(searchResponse.getHits().hits().length, equalTo(size));
for (int i = 0; i < size; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat(searchHit.id(), equalTo(Integer.toString(9 - i)));

String expected = new String(new char[]{(char) (97 + (9 - i)), (char) (97 + (9 - i))});
assertThat(searchHit.sortValues()[0].toString(), equalTo(expected));
}

assertThat(searchResponse.toString(), not(containsString("error")));
assertNoFailures(searchResponse);
}

public void testSortMinValueScript() throws IOException {
String mapping = jsonBuilder()
.startObject()
.startObject("type1")
.startObject("properties")
.startObject("lvalue")
.field("type", "long")
.endObject()
.startObject("dvalue")
.field("type", "double")
.endObject()
.startObject("svalue")
.field("type", "keyword")
.endObject()
.startObject("gvalue")
.field("type", "geo_point")
.endObject()
.endObject()
.endObject()
.endObject().string();

assertAcked(prepareCreate("test").addMapping("type1", mapping));
ensureGreen();

for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", "" + i)
.setSource(jsonBuilder()
.startObject()
.field("ord", i)
.field("svalue", new String[]{"" + i, "" + (i + 1), "" + (i + 2)})
.field("lvalue", new long[]{i, i + 1, i + 2})
.field("dvalue", new double[]{i, i + 1, i + 2})
.startObject("gvalue")
.field("lat", (double) i + 1)
.field("lon", (double) i)
.endObject()
.endObject())
.get();
}

for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields
client().prepareIndex("test", "type1", "" + i)
.setSource(jsonBuilder()
.startObject()
.field("ord", i)
.endObject())
.get();
}
client().admin().indices().prepareRefresh("test").get();

// test the long values
SearchResponse searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addScriptField("min", new Script("get min long", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long"))
.setSize(10)
.get();

assertNoFailures(searchResponse);

assertHitCount(searchResponse, 20L);
for (int i = 0; i < 10; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo((long) i));
}

// test the double values
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addScriptField("min", new Script("get min double", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long"))
.setSize(10)
.get();

assertNoFailures(searchResponse);

assertHitCount(searchResponse, 20L);
for (int i = 0; i < 10; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo((double) i));
}

// test the string values
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addScriptField("min", new Script("get min string", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long"))
.setSize(10)
.get();

assertNoFailures(searchResponse);

assertHitCount(searchResponse, 20L);
for (int i = 0; i < 10; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), equalTo(i));
}

// test the geopoint values
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addScriptField("min", new Script("get min geopoint lon", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long"))
.setSize(10)
.get();

assertNoFailures(searchResponse);

assertHitCount(searchResponse, 20L);
for (int i = 0; i < 10; i++) {
SearchHit searchHit = searchResponse.getHits().getAt(i);
assertThat("res: " + i + " id: " + searchHit.getId(), searchHit.field("min").value(), closeTo(i, GeoUtils.TOLERANCE));
}
}

public void testDocumentsWithNullValue() throws Exception {
// TODO: sort shouldn't fail when sort field is mapped dynamically
// We have to specify mapping explicitly because by the time search is performed dynamic mapping might not
// be propagated to all nodes yet and the sort operation fails when the sort field is not defined
||||
String mapping = jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("id")
|
||||
.field("type", "keyword")
|
||||
.endObject()
|
||||
.startObject("svalue")
|
||||
.field("type", "keyword")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject().string();
|
||||
assertAcked(prepareCreate("test").addMapping("type1", mapping));
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "type1")
|
||||
.setSource(jsonBuilder().startObject()
|
||||
.field("id", "1")
|
||||
.field("svalue", "aaa")
|
||||
.endObject())
|
||||
.get();
|
         client().prepareIndex("test", "type1")
                 .setSource(jsonBuilder().startObject()
                         .field("id", "2")
                         .nullField("svalue")
                         .endObject())
                 .get();

         client().prepareIndex("test", "type1")
                 .setSource(jsonBuilder().startObject()
                         .field("id", "3")
                         .field("svalue", "bbb")
                         .endObject())
                 .get();

         flush();
         refresh();

         Script scripField = new Script("doc['id'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, null);

         SearchResponse searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
                 .addScriptField("id", scripField)
                 .addSort("svalue", SortOrder.ASC)
                 .get();

         assertNoFailures(searchResponse);

         assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
         assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1"));
         assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3"));
         assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
                 .addScriptField("id", new Script("doc['id'].values[0]", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                 .addSort("svalue", SortOrder.ASC)
                 .get();

         assertNoFailures(searchResponse);

         assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
         assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1"));
         assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3"));
         assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
                 .addScriptField("id", scripField)
                 .addSort("svalue", SortOrder.DESC)
                 .get();

         if (searchResponse.getFailedShards() > 0) {
             logger.warn("Failed shards:");
             for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
                 logger.warn("-> {}", shardSearchFailure);
             }
         }
         assertThat(searchResponse.getFailedShards(), equalTo(0));

         assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
         assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("3"));
         assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("1"));
         assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

         // a query with docs just with null values
         searchResponse = client().prepareSearch()
                 .setQuery(termQuery("id", "2"))
                 .addScriptField("id", scripField)
                 .addSort("svalue", SortOrder.DESC)
                 .get();

         if (searchResponse.getFailedShards() > 0) {
             logger.warn("Failed shards:");
             for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
                 logger.warn("-> {}", shardSearchFailure);
             }
         }
         assertThat(searchResponse.getFailedShards(), equalTo(0));

         assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("2"));
     }

     public void test2920() throws IOException {
         assertAcked(prepareCreate("test")
                 .addMapping("test", jsonBuilder()
                         .startObject()
                         .startObject("test")
                         .startObject("properties")
                         .startObject("value")
                         .field("type", "keyword")
                         .endObject()
                         .endObject()
                         .endObject()
                         .endObject()));
         ensureGreen();
         for (int i = 0; i < 10; i++) {
             client().prepareIndex("test", "test", Integer.toString(i))
                     .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()).get();
         }
         refresh();

         Script sortScript = new Script("\u0027\u0027", ScriptType.INLINE, CustomScriptPlugin.NAME, null);
         SearchResponse searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
                 .addSort(scriptSort(sortScript, ScriptSortType.STRING))
                 .setSize(10)
                 .get();
         assertNoFailures(searchResponse);
     }
 }
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.messy.tests;
+package org.elasticsearch.search.stats;

 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;

@@ -32,19 +32,22 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.search.stats.SearchStats.Stats;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.MockScriptPlugin;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.groovy.GroovyPlugin;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
+import java.util.function.Function;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.script.ScriptService.ScriptType;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;

@@ -56,13 +59,23 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

 /**
  */
 @ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SearchStatsTests extends ESIntegTestCase {
+public class SearchStatsIT extends ESIntegTestCase {

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(GroovyPlugin.class);
+        return Collections.singleton(CustomScriptPlugin.class);
     }

+    public static class CustomScriptPlugin extends MockScriptPlugin {
+
+        @Override
+        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+            return Collections.singletonMap("_source.field", vars -> {
+                Map<?, ?> src = (Map) vars.get("_source");
+                return src.get("field");
+            });
+        }
+    }
+
     @Override

@@ -108,7 +121,7 @@ public class SearchStatsTests extends ESIntegTestCase {
         SearchResponse searchResponse = internalCluster().coordOnlyNodeClient().prepareSearch()
                 .setQuery(QueryBuilders.termQuery("field", "value")).setStats("group1", "group2")
                 .highlighter(new HighlightBuilder().field("field"))
-                .addScriptField("scrip1", new Script("_source.field"))
+                .addScriptField("script1", new Script("_source.field", ScriptType.INLINE, CustomScriptPlugin.NAME, null))
                 .setSize(100)
                 .execute().actionGet();
         assertHitCount(searchResponse, docsTest1 + docsTest2);
@@ -61,7 +61,9 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.indices.InvalidIndexNameException;
+import org.elasticsearch.repositories.IndexId;
+import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.repositories.RepositoryException;
 import org.elasticsearch.test.junit.annotations.TestLogging;

@@ -884,7 +886,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

         logger.info("--> delete index metadata and shard metadata");
-        Path metadata = repo.resolve("meta-test-snap-1.dat");
+        Path metadata = repo.resolve("meta-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat");
         Files.delete(metadata);

         logger.info("--> delete snapshot");

@@ -917,7 +919,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

         logger.info("--> truncate snapshot file to make it unreadable");
-        Path snapshotPath = repo.resolve("snap-test-snap-1-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat");
+        Path snapshotPath = repo.resolve("snap-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat");
         try(SeekableByteChannel outChan = Files.newByteChannel(snapshotPath, StandardOpenOption.WRITE)) {
             outChan.truncate(randomInt(10));
         }

@@ -2017,6 +2019,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

         logger.info("--> emulate an orphan snapshot");
+        RepositoriesService repositoriesService = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
+        final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData();
+        final IndexId indexId = repositoryData.resolveIndexId(idxName);

         clusterService.submitStateUpdateTask("orphan snapshot test", new ClusterStateUpdateTask() {

@@ -2033,7 +2038,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
                                              true,
                                              false,
                                              State.ABORTED,
-                                             Collections.singletonList(idxName),
+                                             Collections.singletonList(indexId),
                                              System.currentTimeMillis(),
                                              shards.build()));
                 return ClusterState.builder(currentState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(Collections.unmodifiableList(entries))).build();

@@ -2189,7 +2194,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

         logger.info("--> truncate snapshot file to make it unreadable");
-        Path snapshotPath = repo.resolve("snap-test-snap-2-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat");
+        Path snapshotPath = repo.resolve("snap-" + createSnapshotResponse.getSnapshotInfo().snapshotId().getUUID() + ".dat");
         try(SeekableByteChannel outChan = Files.newByteChannel(snapshotPath, StandardOpenOption.WRITE)) {
             outChan.truncate(randomInt(10));
         }
@@ -48,6 +48,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.plugins.RepositoryPlugin;
 import org.elasticsearch.repositories.Repository;
+import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.snapshots.SnapshotId;

@@ -112,8 +113,8 @@ public class MockRepository extends FsRepository {
     }

     @Override
-    public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData clusterMetadata) {
-        if (blockOnInitialization ) {
+    public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetadata) {
+        if (blockOnInitialization) {
             blockExecution();
         }
         super.initializeSnapshot(snapshotId, indices, clusterMetadata);
@@ -106,14 +106,14 @@ public class SimpleTTLIT extends ESIntegTestCase {
         long now = System.currentTimeMillis();
         IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
                 .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type1", "with_routing").setSource("field1", "value1")
                 .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRouting("routing").setRefreshPolicy(IMMEDIATE).get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type1", "no_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
         indexResponse = client().prepareIndex("test", "type2", "default_ttl").setSource("field1", "value1").get();
-        assertEquals(DocWriteResponse.Operation.CREATE, indexResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());

         // realtime get check
         long currentTime = System.currentTimeMillis();

@@ -259,7 +259,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
         long thirdTtl = aLongTime * 1;
         IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1")
                 .setTTL(firstTtl).setRefreshPolicy(IMMEDIATE).get();
-        assertTrue(indexResponse.getOperation() == DocWriteResponse.Operation.CREATE);
+        assertTrue(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
         assertThat(getTtl("type1", 1), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl)));

         // Updating with the default detect_noop without a change to the document doesn't change the ttl.
@@ -371,7 +371,7 @@ public class UpdateIT extends ESIntegTestCase {
                 .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject())
                 .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null))
                 .execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -383,7 +383,7 @@ public class UpdateIT extends ESIntegTestCase {
                 .setUpsert(XContentFactory.jsonBuilder().startObject().field("field", 1).endObject())
                 .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null))
                 .execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -412,7 +412,7 @@ public class UpdateIT extends ESIntegTestCase {
                 .setScriptedUpsert(true)
                 .setScript(new Script("", ScriptService.ScriptType.INLINE, "scripted_upsert", params))
                 .execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.CREATE, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.CREATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -426,7 +426,7 @@ public class UpdateIT extends ESIntegTestCase {
                 .setScriptedUpsert(true)
                 .setScript(new Script("", ScriptService.ScriptType.INLINE, "scripted_upsert", params))
                 .execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -582,7 +582,7 @@ public class UpdateIT extends ESIntegTestCase {
         UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
         assertThat(updateResponse.getVersion(), equalTo(2L));
-        assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -595,7 +595,7 @@ public class UpdateIT extends ESIntegTestCase {
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", params)).execute().actionGet();
         assertThat(updateResponse.getVersion(), equalTo(3L));
-        assertEquals(DocWriteResponse.Operation.INDEX, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -607,7 +607,7 @@ public class UpdateIT extends ESIntegTestCase {
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("op", "none")))).execute().actionGet();
         assertThat(updateResponse.getVersion(), equalTo(3L));
-        assertEquals(DocWriteResponse.Operation.NOOP, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {

@@ -619,7 +619,7 @@ public class UpdateIT extends ESIntegTestCase {
         updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
                 .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("op", "delete")))).execute().actionGet();
         assertThat(updateResponse.getVersion(), equalTo(4L));
-        assertEquals(DocWriteResponse.Operation.DELETE, updateResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.DELETED, updateResponse.getResult());
         assertThat(updateResponse.getIndex(), equalTo("test"));

         for (int i = 0; i < 5; i++) {
@@ -59,7 +59,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
         // Note - external version doesn't throw version conflicts on deletes of non existent records. This is different from internal versioning

         DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());

         // this should conflict with the delete command transaction which told us that the object was deleted at version 17.
         assertThrows(

@@ -98,7 +98,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
             // deleting with a lower version works.
             long v = randomIntBetween(12, 14);
             DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(v).setVersionType(VersionType.FORCE).get();
-            assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
+            assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
             assertThat(deleteResponse.getVersion(), equalTo(v));
         }

@@ -133,7 +133,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
             // Delete with a higher or equal version deletes all versions up to the given one.
             long v = randomIntBetween(14, 17);
             DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(v).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet();
-            assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
+            assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
             assertThat(deleteResponse.getVersion(), equalTo(v));

             // Deleting with a lower version keeps on failing after a delete.

@@ -144,7 +144,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {

             // But delete with a higher version is OK.
             deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet();
-            assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation());
+            assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
             assertThat(deleteResponse.getVersion(), equalTo(18L));
         }

@@ -175,7 +175,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {

         // Delete with a higher version deletes all versions up to the given one.
         DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(17L));

         // Deleting with a lower version keeps on failing after a delete.

@@ -186,7 +186,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {

         // But delete with a higher version is OK.
         deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(18L));


@@ -196,7 +196,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {


         deleteResponse = client().prepareDelete("test", "type", "1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(20L));

         // Make sure that the next delete will be GC. Note we do it on the index settings so it will be cleaned up

@@ -281,7 +281,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
         }

         DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(2).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.DELETE, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(3L));

         assertThrows(client().prepareDelete("test", "type", "1").setVersion(2).execute(), VersionConflictEngineException.class);

@@ -290,7 +290,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
         // This is intricate - the object was deleted but a delete transaction was with the right version. We add another one
         // and thus the transaction is increased.
         deleteResponse = client().prepareDelete("test", "type", "1").setVersion(3).execute().actionGet();
-        assertEquals(DocWriteResponse.Operation.NOOP, deleteResponse.getOperation());
+        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
         assertThat(deleteResponse.getVersion(), equalTo(4L));
     }

@@ -479,7 +479,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
                 sb.append(" version=");
                 sb.append(deleteResponse.getVersion());
                 sb.append(" found=");
-                sb.append(deleteResponse.getOperation() == DocWriteResponse.Operation.DELETE);
+                sb.append(deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
             } else if (response instanceof IndexResponse) {
                 IndexResponse indexResponse = (IndexResponse) response;
                 sb.append(" index=");

@@ -491,7 +491,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
                 sb.append(" version=");
                 sb.append(indexResponse.getVersion());
                 sb.append(" created=");
-                sb.append(indexResponse.getOperation() == DocWriteResponse.Operation.CREATE);
+                sb.append(indexResponse.getResult() == DocWriteResponse.Result.CREATED);
             } else {
                 sb.append(" response: " + response);
             }
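The versioning hunks above all encode the same behavioral distinction: a delete that removed a document now reports `DELETED`, while an external-versioned delete of a missing document reports `NOT_FOUND` rather than a version conflict. A minimal caller-side sketch of what these tests assert, assuming a 5.x transport client bound to `client` (the index, type, and id values are illustrative only):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.index.VersionType;

// Hypothetical usage mirroring the SimpleVersioningIT assertions above.
DeleteResponse deleteResponse = client.prepareDelete("test", "type", "1")
        .setVersion(17)
        .setVersionType(VersionType.EXTERNAL)
        .get();
// With external versioning, deleting a document that does not exist is not a
// conflict; the response simply reports that nothing was found.
if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
    // nothing was deleted, but the tombstone version was still recorded
}
--------------------------------------------------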
@@ -57,7 +57,7 @@ $ cat requests
 { "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } }
 { "field1" : "value1" }
 $ curl -s -XPOST localhost:9200/_bulk --data-binary "@requests"; echo
-{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"_operation":"create","forced_refresh":false}}]}
+{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"result":"created","forced_refresh":false}}]}
 --------------------------------------------------

 Because this format uses literal `\n`'s as delimiters, please be sure
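Each bulk item carries the same `result` value in the Java API. A short sketch of reading it per item, assuming an existing `client`; the request content is illustrative:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;

// Hypothetical usage: inspect the per-item result of a bulk indexing call.
BulkResponse bulkResponse = client.prepareBulk()
        .add(client.prepareIndex("test", "type1", "1").setSource("field1", "value1"))
        .get();
for (BulkItemResponse item : bulkResponse) {
    if (item.isFailed() == false) {
        DocWriteResponse response = item.getResponse();
        if (response.getResult() == DocWriteResponse.Result.CREATED) {
            // the document did not exist before this request
        }
    }
}
--------------------------------------------------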
@@ -26,7 +26,7 @@ The result of the above delete operation is:
     "_type" : "tweet",
     "_id" : "1",
     "_version" : 2,
-    "_operation: delete"
+    "result: deleted"
 }
 --------------------------------------------------

@@ -31,7 +31,7 @@ The result of the above index operation is:
     "_id" : "1",
     "_version" : 1,
     "created" : true,
-    "_operation" : create
+    "result" : created
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"successful" : 2/"successful" : 1/]

@@ -231,7 +231,7 @@ The result of the above index operation is:
     "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
     "_version" : 1,
     "created" : true,
-    "_operation": "create"
+    "result": "created"
 }
 --------------------------------------------------
 // TESTRESPONSE[s/6a8ca01c-7896-48e9-81cc-9f70661fcb32/$body._id/ s/"successful" : 2/"successful" : 1/]
@@ -133,7 +133,7 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
 --------------------------------------------------

 If `name` was `new_name` before the request was sent then the entire update
-request is ignored. The `operation` element in the response returns `noop` if
+request is ignored. The `result` element in the response returns `noop` if
 the request was ignored.

 [source,js]

@@ -143,7 +143,7 @@ the request was ignored.
     "_type": "type1",
     "_id": "1",
     "_version": 1,
-    "_operation": noop
+    "result": noop
 }
 --------------------------------------------------

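The same `noop` element is what the Java API surfaces as `DocWriteResponse.Result.NOOP`. A minimal sketch, assuming an existing `client` and a document like the one above (field names and values are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.xcontent.XContentFactory;

// Hypothetical usage: detect_noop is on by default, so re-sending the same
// value yields a NOOP result and does not bump the document version.
UpdateResponse updateResponse = client.prepareUpdate("test", "type1", "1")
        .setDoc(XContentFactory.jsonBuilder().startObject().field("name", "new_name").endObject())
        .get();
if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) {
    // the update was skipped; updateResponse.getVersion() is unchanged
}
--------------------------------------------------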
@@ -341,7 +341,7 @@ as `setRefresh(true)` used to have. See `setRefreshPolicy`'s javadoc for more.
 Some Java APIs (e.g., `IndicesAdminClient#setSettings`) would support Java properties syntax
 (line-delimited key=value pairs). This support has been removed.

-=== Render Search Template Java API has been removed
+==== Render Search Template Java API has been removed

 The Render Search Template Java API including `RenderSearchTemplateAction`, `RenderSearchTemplateRequest` and
 `RenderSearchTemplateResponse` has been removed in favor of a new `simulate` option in the Search Template Java API.
@@ -38,10 +38,6 @@ search terms, but it is possible to specify other fields in the query syntax:

     book.\*:(quick brown)

-* where the field `title` has no value (or is missing):
-
-    _missing_:title
-
 * where the field `title` has any non-null value:

     _exists_:title
@@ -78,6 +78,8 @@ public class RunningStats implements Writeable, Cloneable {
     @SuppressWarnings("unchecked")
     public RunningStats(StreamInput in) throws IOException {
         this();
+        // read doc count
+        docCount = (Long)in.readGenericValue();
         // read fieldSum
         fieldSum = (HashMap<String, Double>)in.readGenericValue();
         // counts

@@ -96,6 +98,8 @@ public class RunningStats implements Writeable, Cloneable {

     @Override
     public void writeTo(StreamOutput out) throws IOException {
+        // marshall doc count
+        out.writeGenericValue(docCount);
         // marshall fieldSum
         out.writeGenericValue(fieldSum);
         // counts
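The two hunks above are a matched pair: `Writeable` serialization is positional, so whatever `writeTo` emits must be read back in the same order by the `StreamInput` constructor. A stripped-down sketch of that contract (the class and field names below are illustrative, not part of the commit):

[source,java]
--------------------------------------------------
import java.io.IOException;
import java.util.HashMap;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Hypothetical mirror of the RunningStats fix: docCount is now both written
// and read, in the same position on each side of the stream.
class StatsExample implements Writeable {
    long docCount;
    HashMap<String, Double> fieldSum;

    @SuppressWarnings("unchecked")
    StatsExample(StreamInput in) throws IOException {
        docCount = (Long) in.readGenericValue();                     // field 1
        fieldSum = (HashMap<String, Double>) in.readGenericValue();  // field 2
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeGenericValue(docCount);   // field 1
        out.writeGenericValue(fieldSum);   // field 2
    }
}
--------------------------------------------------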
@ -1,629 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.ShardSearchFailure;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class IndexLookupTests extends ESIntegTestCase {
|
||||
String includeAllFlag = "_FREQUENCIES | _OFFSETS | _PAYLOADS | _POSITIONS | _CACHE";
|
||||
String includeAllWithoutRecordFlag = "_FREQUENCIES | _OFFSETS | _PAYLOADS | _POSITIONS ";
|
||||
private HashMap<String, List<Object>> expectedEndOffsetsArray;
|
||||
private HashMap<String, List<Object>> expectedPayloadsArray;
|
||||
private HashMap<String, List<Object>> expectedPositionsArray;
|
||||
private HashMap<String, List<Object>> emptyArray;
|
||||
private HashMap<String, List<Object>> expectedStartOffsetsArray;
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singleton(GroovyPlugin.class);
|
||||
}
|
||||
|
||||
void initTestData() throws InterruptedException, ExecutionException, IOException {
|
||||
emptyArray = new HashMap<>();
|
||||
List<Object> empty1 = new ArrayList<>();
|
||||
empty1.add(-1);
|
||||
empty1.add(-1);
|
||||
emptyArray.put("1", empty1);
|
||||
List<Object> empty2 = new ArrayList<>();
|
||||
empty2.add(-1);
|
||||
empty2.add(-1);
|
||||
emptyArray.put("2", empty2);
|
||||
List<Object> empty3 = new ArrayList<>();
|
||||
empty3.add(-1);
|
||||
empty3.add(-1);
|
||||
emptyArray.put("3", empty3);
|
||||
|
||||
expectedPositionsArray = new HashMap<>();
|
||||
|
||||
List<Object> pos1 = new ArrayList<>();
|
||||
pos1.add(1);
|
||||
pos1.add(2);
|
||||
expectedPositionsArray.put("1", pos1);
|
||||
List<Object> pos2 = new ArrayList<>();
|
||||
pos2.add(0);
|
||||
pos2.add(1);
|
||||
expectedPositionsArray.put("2", pos2);
|
||||
List<Object> pos3 = new ArrayList<>();
|
||||
pos3.add(0);
|
||||
pos3.add(4);
|
||||
expectedPositionsArray.put("3", pos3);
|
||||
|
||||
expectedPayloadsArray = new HashMap<>();
|
||||
List<Object> pay1 = new ArrayList<>();
|
||||
pay1.add(2);
|
||||
pay1.add(3);
|
||||
expectedPayloadsArray.put("1", pay1);
|
||||
List<Object> pay2 = new ArrayList<>();
|
||||
pay2.add(1);
|
||||
pay2.add(2);
|
||||
expectedPayloadsArray.put("2", pay2);
|
||||
List<Object> pay3 = new ArrayList<>();
|
||||
pay3.add(1);
|
||||
pay3.add(-1);
|
||||
expectedPayloadsArray.put("3", pay3);
|
||||
/*
|
||||
* "a|1 b|2 b|3 c|4 d " "b|1 b|2 c|3 d|4 a " "b|1 c|2 d|3 a|4 b "
|
||||
*/
|
||||
expectedStartOffsetsArray = new HashMap<>();
|
||||
List<Object> starts1 = new ArrayList<>();
|
||||
starts1.add(4);
|
||||
starts1.add(8);
|
||||
expectedStartOffsetsArray.put("1", starts1);
|
||||
List<Object> starts2 = new ArrayList<>();
|
||||
starts2.add(0);
|
||||
starts2.add(4);
|
||||
expectedStartOffsetsArray.put("2", starts2);
|
||||
List<Object> starts3 = new ArrayList<>();
|
||||
starts3.add(0);
|
||||
starts3.add(16);
|
||||
expectedStartOffsetsArray.put("3", starts3);
|
||||
|
||||
expectedEndOffsetsArray = new HashMap<>();
|
||||
List<Object> ends1 = new ArrayList<>();
|
||||
ends1.add(7);
|
||||
ends1.add(11);
|
||||
expectedEndOffsetsArray.put("1", ends1);
|
||||
List<Object> ends2 = new ArrayList<>();
|
||||
ends2.add(3);
|
||||
ends2.add(7);
|
||||
expectedEndOffsetsArray.put("2", ends2);
|
||||
List<Object> ends3 = new ArrayList<>();
|
||||
ends3.add(3);
|
||||
ends3.add(17);
|
||||
expectedEndOffsetsArray.put("3", ends3);
|
||||
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("int_payload_field").field("type", "text").field("index_options", "offsets")
|
||||
.field("analyzer", "payload_int").endObject().endObject().endObject().endObject();
|
||||
assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
|
||||
Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.payload_int.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.payload_int.filter", "delimited_int")
|
||||
.put("index.analysis.filter.delimited_int.delimiter", "|")
|
||||
.put("index.analysis.filter.delimited_int.encoding", "int")
|
||||
.put("index.analysis.filter.delimited_int.type", "delimited_payload_filter")));
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("int_payload_field", "a|1 b|2 b|3 c|4 d "), client()
|
||||
.prepareIndex("test", "type1", "2").setSource("int_payload_field", "b|1 b|2 c|3 d|4 a "),
|
||||
client().prepareIndex("test", "type1", "3").setSource("int_payload_field", "b|1 c|2 d|3 a|4 b "));
|
||||
ensureGreen();
|
||||
}
|
||||
|
||||
public void testTwoScripts() throws Exception {
|
||||
initTestData();
|
||||
|
||||
// check term frequencies for 'a'
|
||||
Script scriptFieldScript = new Script("term = _index['int_payload_field']['c']; term.tf()");
|
||||
scriptFieldScript = new Script("1");
|
||||
Script scoreScript = new Script("term = _index['int_payload_field']['b']; term.tf()");
|
||||
Map<String, Object> expectedResultsField = new HashMap<>();
|
||||
expectedResultsField.put("1", 1);
|
||||
expectedResultsField.put("2", 1);
|
||||
expectedResultsField.put("3", 1);
|
||||
Map<String, Object> expectedResultsScore = new HashMap<>();
|
||||
expectedResultsScore.put("1", 2f);
|
||||
expectedResultsScore.put("2", 2f);
|
||||
expectedResultsScore.put("3", 2f);
|
||||
checkOnlyFunctionScore(scoreScript, expectedResultsScore, 3);
|
||||
checkValueInEachDocWithFunctionScore(scriptFieldScript, expectedResultsField, scoreScript, expectedResultsScore, 3);
|
||||
|
||||
}
|
||||
|
||||
public void testCallWithDifferentFlagsFails() throws Exception {
|
||||
initTestData();
|
||||
|
||||
// should throw an exception, we cannot call with different flags twice
|
||||
// if the flags of the second call were not included in the first call.
|
||||
Script script = new Script("term = _index['int_payload_field']['b']; return _index['int_payload_field'].get('b', _POSITIONS).tf();");
|
||||
try {
|
||||
client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script).execute().actionGet();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(
|
||||
"got: " + e.toString(),
|
||||
e.toString()
|
||||
.indexOf(
|
||||
"You must call get with all required flags! Instead of _index['int_payload_field'].get('b', _FREQUENCIES) and _index['int_payload_field'].get('b', _POSITIONS) call _index['int_payload_field'].get('b', _FREQUENCIES | _POSITIONS) once]"),
|
||||
Matchers.greaterThan(-1));
|
||||
}
|
||||
|
||||
// Should not throw an exception this way round
|
||||
script = new Script(
|
||||
"term = _index['int_payload_field'].get('b', _POSITIONS | _FREQUENCIES);return _index['int_payload_field']['b'].tf();");
|
||||
client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script).execute().actionGet();
|
||||
}
|
||||
|
||||
private void checkOnlyFunctionScore(Script scoreScript, Map<String, Object> expectedScore, int numExpectedDocs) {
|
||||
SearchResponse sr = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(sr, numExpectedDocs);
|
||||
for (SearchHit hit : sr.getHits().getHits()) {
|
||||
assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(),
|
||||
Matchers.closeTo(hit.score(), 1.e-4));
|
||||
}
|
||||
}
|
||||
|
||||
public void testDocumentationExample() throws Exception {
|
||||
initTestData();
|
||||
|
||||
Script script = new Script("term = _index['float_payload_field'].get('b'," + includeAllFlag
|
||||
+ "); payloadSum=0; for (pos in term) {payloadSum = pos.payloadAsInt(0)}; payloadSum");
|
||||
|
||||
// non existing field: sum should be 0
|
||||
HashMap<String, Object> zeroArray = new HashMap<>();
|
||||
zeroArray.put("1", 0);
|
||||
zeroArray.put("2", 0);
|
||||
zeroArray.put("3", 0);
|
||||
checkValueInEachDoc(script, zeroArray, 3);
|
||||
|
||||
script = new Script("term = _index['int_payload_field'].get('b'," + includeAllFlag
|
||||
+ "); payloadSum=0; for (pos in term) {payloadSum = payloadSum + pos.payloadAsInt(0)}; payloadSum");
|
||||
|
||||
// existing field: sums should be as here:
|
||||
zeroArray.put("1", 5);
|
||||
zeroArray.put("2", 3);
|
||||
zeroArray.put("3", 1);
|
||||
checkValueInEachDoc(script, zeroArray, 3);
|
||||
}
|
||||
|
||||
public void testIteratorAndRecording() throws Exception {
|
||||
initTestData();
|
||||
|
||||
// call twice with record: should work as expected
|
||||
Script script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "position");
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "startOffset");
|
||||
checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "endOffset");
|
||||
checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllFlag, "payloadAsInt(-1)");
|
||||
checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
|
||||
|
||||
// no record and get iterator twice: should fail
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "position");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "startOffset");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "endOffset");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptIterateTwice("b", includeAllWithoutRecordFlag, "payloadAsInt(-1)");
|
||||
checkExceptions(script);
|
||||
|
||||
// no record and get termObject twice and iterate: should fail
|
||||
script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "position");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "startOffset");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "endOffset");
|
||||
checkExceptions(script);
|
||||
script = createPositionsArrayScriptGetInfoObjectTwice("b", includeAllWithoutRecordFlag, "payloadAsInt(-1)");
|
||||
checkExceptions(script);
|
||||
|
||||
}
|
||||
|
||||
private Script createPositionsArrayScriptGetInfoObjectTwice(String term, String flags, String what) {
|
||||
String script = "term = _index['int_payload_field'].get('" + term + "'," + flags
|
||||
+ "); array=[]; for (pos in term) {array.add(pos." + what + ")}; _index['int_payload_field'].get('" + term + "',"
|
||||
+ flags + "); array=[]; for (pos in term) {array.add(pos." + what + ")}";
|
||||
return new Script(script);
|
||||
}
|
||||
|
||||
private Script createPositionsArrayScriptIterateTwice(String term, String flags, String what) {
|
||||
String script = "term = _index['int_payload_field'].get('" + term + "'," + flags
|
||||
+ "); array=[]; for (pos in term) {array.add(pos." + what + ")}; array=[]; for (pos in term) {array.add(pos." + what
|
||||
+ ")}; array";
|
||||
return new Script(script);
|
||||
}
|
||||
|
||||
private Script createPositionsArrayScript(String field, String term, String flags, String what) {
|
||||
String script = "term = _index['" + field + "'].get('" + term + "'," + flags
|
||||
+ "); array=[]; for (pos in term) {array.add(pos." + what + ")}; array";
|
||||
return new Script(script);
|
||||
}
|
||||
|
||||
private Script createPositionsArrayScriptDefaultGet(String field, String term, String what) {
|
||||
String script = "term = _index['" + field + "']['" + term + "']; array=[]; for (pos in term) {array.add(pos." + what
|
||||
+ ")}; array";
|
||||
return new Script(script);
|
||||
}
|
||||
|
||||
public void testFlags() throws Exception {
|
||||
initTestData();
|
||||
|
||||
// check default flag
|
||||
Script script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "position");
|
||||
// there should be no positions
|
||||
/* TODO: the following tests fail with the new postings enum apis because of a bogus assert in BlockDocsEnum
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "startOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "endOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScriptDefaultGet("int_payload_field", "b", "payloadAsInt(-1)");
|
||||
// there should be no payload
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
|
||||
// check FLAG_FREQUENCIES flag
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "position");
|
||||
// there should be no positions
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "startOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "endOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_FREQUENCIES", "payloadAsInt(-1)");
|
||||
// there should be no payloads
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);*/
|
||||
|
||||
// check FLAG_POSITIONS flag
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "position");
|
||||
// there should be positions
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
/* TODO: these tests make a bogus assumption that asking for positions will return only positions
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "startOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "endOffset");
|
||||
// there should be no offsets
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_POSITIONS", "payloadAsInt(-1)");
|
||||
// there should be no payloads
|
||||
checkArrayValsInEachDoc(script, emptyArray, 3);*/
|
||||
|
||||
// check FLAG_OFFSETS flag
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "position");
|
||||
// there should be positions and s forth ...
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "startOffset");
|
||||
checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "endOffset");
|
||||
checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_OFFSETS", "payloadAsInt(-1)");
|
||||
checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
|
||||
|
||||
// check FLAG_PAYLOADS flag
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "position");
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "startOffset");
|
||||
checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "endOffset");
|
||||
checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", "_PAYLOADS", "payloadAsInt(-1)");
|
||||
checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
|
||||
|
||||
// check all flags
|
||||
String allFlags = "_POSITIONS | _OFFSETS | _PAYLOADS";
|
||||
script = createPositionsArrayScript("int_payload_field", "b", allFlags, "position");
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", allFlags, "startOffset");
|
||||
checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", allFlags, "endOffset");
|
||||
checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", allFlags, "payloadAsInt(-1)");
|
||||
checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
|
||||
|
||||
// check all flags without record
|
||||
script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "position");
|
||||
checkArrayValsInEachDoc(script, expectedPositionsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "startOffset");
|
||||
checkArrayValsInEachDoc(script, expectedStartOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "endOffset");
|
||||
checkArrayValsInEachDoc(script, expectedEndOffsetsArray, 3);
|
||||
script = createPositionsArrayScript("int_payload_field", "b", includeAllWithoutRecordFlag, "payloadAsInt(-1)");
|
||||
checkArrayValsInEachDoc(script, expectedPayloadsArray, 3);
|
||||
|
||||
}
|
||||
|
||||
private void checkArrayValsInEachDoc(Script script, HashMap<String, List<Object>> expectedArray, int expectedHitSize) {
|
||||
SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
|
||||
.execute().actionGet();
|
||||
assertHitCount(sr, expectedHitSize);
|
||||
int nullCounter = 0;
|
||||
for (SearchHit hit : sr.getHits().getHits()) {
|
||||
Object result = hit.getFields().get("tvtest").getValues();
|
||||
Object expectedResult = expectedArray.get(hit.getId());
|
||||
assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
|
||||
if (expectedResult != null) {
|
||||
nullCounter++;
|
||||
}
|
||||
}
|
||||
assertThat(nullCounter, equalTo(expectedArray.size()));
|
||||
}
|
||||
|
||||
public void testAllExceptPosAndOffset() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("float_payload_field").field("type", "text").field("index_options", "offsets").field("term_vector", "no")
|
||||
.field("analyzer", "payload_float").endObject().startObject("string_payload_field").field("type", "text")
|
||||
.field("index_options", "offsets").field("term_vector", "no").field("analyzer", "payload_string").endObject()
|
||||
.startObject("int_payload_field").field("type", "text").field("index_options", "offsets")
|
||||
.field("analyzer", "payload_int").endObject().endObject().endObject().endObject();
|
||||
assertAcked(prepareCreate("test").addMapping("type1", mapping).setSettings(
|
||||
Settings.builder()
|
||||
.put(indexSettings())
|
||||
.put("index.analysis.analyzer.payload_float.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.payload_float.filter", "delimited_float")
|
||||
.put("index.analysis.filter.delimited_float.delimiter", "|")
|
||||
.put("index.analysis.filter.delimited_float.encoding", "float")
|
||||
.put("index.analysis.filter.delimited_float.type", "delimited_payload_filter")
|
||||
.put("index.analysis.analyzer.payload_string.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.payload_string.filter", "delimited_string")
|
||||
.put("index.analysis.filter.delimited_string.delimiter", "|")
|
||||
.put("index.analysis.filter.delimited_string.encoding", "identity")
|
||||
.put("index.analysis.filter.delimited_string.type", "delimited_payload_filter")
|
||||
.put("index.analysis.analyzer.payload_int.tokenizer", "whitespace")
|
||||
.putArray("index.analysis.analyzer.payload_int.filter", "delimited_int")
|
||||
.put("index.analysis.filter.delimited_int.delimiter", "|")
|
||||
.put("index.analysis.filter.delimited_int.encoding", "int")
|
||||
.put("index.analysis.filter.delimited_int.type", "delimited_payload_filter")
|
||||
.put("index.number_of_shards", 1)));
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("float_payload_field", "a|1 b|2 a|3 b "), client()
|
||||
.prepareIndex("test", "type1", "2").setSource("string_payload_field", "a|a b|b a|a b "),
|
||||
client().prepareIndex("test", "type1", "3").setSource("float_payload_field", "a|4 b|5 a|6 b "),
|
||||
client().prepareIndex("test", "type1", "4").setSource("string_payload_field", "a|b b|a a|b b "),
|
||||
client().prepareIndex("test", "type1", "5").setSource("float_payload_field", "c "),
|
||||
client().prepareIndex("test", "type1", "6").setSource("int_payload_field", "c|1"));
|
||||
|
||||
// get the number of all docs
|
||||
Script script = new Script("_index.numDocs()");
|
||||
checkValueInEachDoc(6, script, 6);
|
||||
|
||||
// get the number of docs with field float_payload_field
|
||||
script = new Script("_index['float_payload_field'].docCount()");
|
||||
checkValueInEachDoc(3, script, 6);
|
||||
|
||||
// corner case: what if the field does not exist?
|
||||
script = new Script("_index['non_existent_field'].docCount()");
|
||||
checkValueInEachDoc(0, script, 6);
|
||||
|
||||
// get the number of all tokens in all docs
|
||||
script = new Script("_index['float_payload_field'].sumttf()");
|
||||
checkValueInEachDoc(9, script, 6);
|
||||
|
||||
// corner case get the number of all tokens in all docs for non existent
|
||||
// field
|
||||
script = new Script("_index['non_existent_field'].sumttf()");
|
||||
checkValueInEachDoc(0, script, 6);
|
||||
|
||||
// get the sum of doc freqs in all docs
|
||||
script = new Script("_index['float_payload_field'].sumdf()");
|
||||
checkValueInEachDoc(5, script, 6);
|
||||
|
||||
// get the sum of doc freqs in all docs for non existent field
|
||||
script = new Script("_index['non_existent_field'].sumdf()");
|
||||
checkValueInEachDoc(0, script, 6);
|
||||
|
||||
// check term frequencies for 'a'
|
||||
script = new Script("term = _index['float_payload_field']['a']; if (term != null) {term.tf()}");
|
||||
Map<String, Object> expectedResults = new HashMap<>();
|
||||
expectedResults.put("1", 2);
|
||||
expectedResults.put("2", 0);
|
||||
expectedResults.put("3", 2);
|
||||
expectedResults.put("4", 0);
|
||||
expectedResults.put("5", 0);
|
||||
expectedResults.put("6", 0);
|
||||
checkValueInEachDoc(script, expectedResults, 6);
|
||||
expectedResults.clear();
|
||||
|
||||
// check doc frequencies for 'c'
|
||||
script = new Script("term = _index['float_payload_field']['c']; if (term != null) {term.df()}");
|
||||
expectedResults.put("1", 1L);
|
||||
expectedResults.put("2", 1L);
|
||||
expectedResults.put("3", 1L);
|
||||
expectedResults.put("4", 1L);
|
||||
expectedResults.put("5", 1L);
|
||||
expectedResults.put("6", 1L);
|
||||
checkValueInEachDoc(script, expectedResults, 6);
|
||||
expectedResults.clear();
|
||||
|
||||
// check doc frequencies for term that does not exist
|
||||
script = new Script("term = _index['float_payload_field']['non_existent_term']; if (term != null) {term.df()}");
|
||||
expectedResults.put("1", 0L);
|
||||
expectedResults.put("2", 0L);
|
||||
expectedResults.put("3", 0L);
|
||||
expectedResults.put("4", 0L);
|
||||
expectedResults.put("5", 0L);
|
||||
expectedResults.put("6", 0L);
|
||||
checkValueInEachDoc(script, expectedResults, 6);
|
||||
expectedResults.clear();
|
||||
|
||||
// check doc frequencies for term that does not exist
|
||||
script = new Script("term = _index['non_existent_field']['non_existent_term']; if (term != null) {term.tf()}");
|
||||
expectedResults.put("1", 0);
|
||||
expectedResults.put("2", 0);
|
||||
expectedResults.put("3", 0);
|
||||
expectedResults.put("4", 0);
|
||||
expectedResults.put("5", 0);
|
||||
expectedResults.put("6", 0);
|
||||
checkValueInEachDoc(script, expectedResults, 6);
|
||||
expectedResults.clear();
|
||||
|
||||
// check total term frequencies for 'a'
|
||||
script = new Script("term = _index['float_payload_field']['a']; if (term != null) {term.ttf()}");
|
||||
expectedResults.put("1", 4L);
|
||||
expectedResults.put("2", 4L);
|
||||
expectedResults.put("3", 4L);
|
||||
expectedResults.put("4", 4L);
|
||||
expectedResults.put("5", 4L);
|
||||
expectedResults.put("6", 4L);
|
||||
        checkValueInEachDoc(script, expectedResults, 6);
        expectedResults.clear();

        // check float payload for 'b'
        HashMap<String, List<Object>> expectedPayloadsArray = new HashMap<>();
        script = createPositionsArrayScript("float_payload_field", "b", includeAllFlag, "payloadAsFloat(-1)");
        float missingValue = -1;
        List<Object> payloadsFor1 = new ArrayList<>();
        payloadsFor1.add(2f);
        payloadsFor1.add(missingValue);
        expectedPayloadsArray.put("1", payloadsFor1);
        List<Object> payloadsFor2 = new ArrayList<>();
        payloadsFor2.add(5f);
        payloadsFor2.add(missingValue);
        expectedPayloadsArray.put("3", payloadsFor2);
        expectedPayloadsArray.put("6", new ArrayList<>());
        expectedPayloadsArray.put("5", new ArrayList<>());
        expectedPayloadsArray.put("4", new ArrayList<>());
        expectedPayloadsArray.put("2", new ArrayList<>());
        checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);

        // check string payload for 'b'
        expectedPayloadsArray.clear();
        payloadsFor1.clear();
        payloadsFor2.clear();
        script = createPositionsArrayScript("string_payload_field", "b", includeAllFlag, "payloadAsString()");
        payloadsFor1.add("b");
        payloadsFor1.add(null);
        expectedPayloadsArray.put("2", payloadsFor1);
        payloadsFor2.add("a");
        payloadsFor2.add(null);
        expectedPayloadsArray.put("4", payloadsFor2);
        expectedPayloadsArray.put("6", new ArrayList<>());
        expectedPayloadsArray.put("5", new ArrayList<>());
        expectedPayloadsArray.put("3", new ArrayList<>());
        expectedPayloadsArray.put("1", new ArrayList<>());
        checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);

        // check int payload for 'c'
        expectedPayloadsArray.clear();
        payloadsFor1.clear();
        payloadsFor2.clear();
        script = createPositionsArrayScript("int_payload_field", "c", includeAllFlag, "payloadAsInt(-1)");
        payloadsFor1 = new ArrayList<>();
        payloadsFor1.add(1);
        expectedPayloadsArray.put("6", payloadsFor1);
        expectedPayloadsArray.put("5", new ArrayList<>());
        expectedPayloadsArray.put("4", new ArrayList<>());
        expectedPayloadsArray.put("3", new ArrayList<>());
        expectedPayloadsArray.put("2", new ArrayList<>());
        expectedPayloadsArray.put("1", new ArrayList<>());
        checkArrayValsInEachDoc(script, expectedPayloadsArray, 6);
    }

    private void checkExceptions(Script script) {
        try {
            SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
                    .execute().actionGet();
            assertThat(sr.getHits().hits().length, equalTo(0));
            ShardSearchFailure[] shardFails = sr.getShardFailures();
            for (ShardSearchFailure fail : shardFails) {
                // "more that once" (sic) matches the engine's exception message verbatim
                assertThat(fail.reason().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."),
                        Matchers.greaterThan(-1));
            }
        } catch (SearchPhaseExecutionException ex) {
            assertThat(
                    "got " + ex.toString(),
                    ex.toString().indexOf("Cannot iterate twice! If you want to iterate more that once, add _CACHE explicitly."),
                    Matchers.greaterThan(-1));
        }
    }

    private void checkValueInEachDocWithFunctionScore(Script fieldScript, Map<String, Object> expectedFieldVals, Script scoreScript,
            Map<String, Object> expectedScore, int numExpectedDocs) {
        SearchResponse sr = client().prepareSearch("test")
                .setQuery(QueryBuilders.functionScoreQuery(ScoreFunctionBuilders.scriptFunction(scoreScript)))
                .addScriptField("tvtest", fieldScript).execute().actionGet();
        assertHitCount(sr, numExpectedDocs);
        for (SearchHit hit : sr.getHits().getHits()) {
            Object result = hit.getFields().get("tvtest").getValues().get(0);
            Object expectedResult = expectedFieldVals.get(hit.getId());
            assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
            assertThat("for doc " + hit.getId(), ((Float) expectedScore.get(hit.getId())).doubleValue(),
                    Matchers.closeTo(hit.score(), 1.e-4));
        }
    }

    private void checkValueInEachDoc(Script script, Map<String, Object> expectedResults, int numExpectedDocs) {
        SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
                .execute().actionGet();
        assertHitCount(sr, numExpectedDocs);
        for (SearchHit hit : sr.getHits().getHits()) {
            Object result = hit.getFields().get("tvtest").getValues().get(0);
            Object expectedResult = expectedResults.get(hit.getId());
            assertThat("for doc " + hit.getId(), result, equalTo(expectedResult));
        }
    }

    private void checkValueInEachDoc(int value, Script script, int numExpectedDocs) {
        SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).addScriptField("tvtest", script)
                .execute().actionGet();
        assertHitCount(sr, numExpectedDocs);
        for (SearchHit hit : sr.getHits().getHits()) {
            Object result = hit.getFields().get("tvtest").getValues().get(0);
            if (result instanceof Integer) {
                assertThat((Integer) result, equalTo(value));
            } else if (result instanceof Long) {
                assertThat(((Long) result).intValue(), equalTo(value));
            } else {
                fail("unexpected result type for doc " + hit.getId() + ": " + result);
            }
        }
    }
}
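Of the helpers used above, only checkArrayValsInEachDoc is not shown in this hunk. A minimal sketch of the contract it asserts, written to match the visible helpers; the body below is an assumption, not copied from the file:

    private void checkArrayValsInEachDoc(Script script, Map<String, List<Object>> expectedArray, int numExpectedDocs) {
        // run the script field over all docs and compare each doc's full value list
        // against the expectation registered for its id
        SearchResponse sr = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .addScriptField("tvtest", script).execute().actionGet();
        assertHitCount(sr, numExpectedDocs);
        for (SearchHit hit : sr.getHits().getHits()) {
            List<Object> expected = expectedArray.get(hit.getId());
            assertThat("for doc " + hit.getId(), hit.getFields().get("tvtest").getValues(), equalTo(expected));
        }
    }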
@ -1,334 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.messy.tests;


import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

/**
 *
 */
public class SimpleSortTests extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class);
    }

    public void testSimpleSorts() throws Exception {
        Random random = random();
        assertAcked(prepareCreate("test")
                .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("str_value").field("type", "keyword").endObject()
                        .startObject("boolean_value").field("type", "boolean").endObject()
                        .startObject("byte_value").field("type", "byte").endObject()
                        .startObject("short_value").field("type", "short").endObject()
                        .startObject("integer_value").field("type", "integer").endObject()
                        .startObject("long_value").field("type", "long").endObject()
                        .startObject("float_value").field("type", "float").endObject()
                        .startObject("double_value").field("type", "double").endObject()
                        .endObject().endObject().endObject()));
        ensureGreen();
        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            IndexRequestBuilder builder = client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject()
                    .field("str_value", new String(new char[]{(char) (97 + i), (char) (97 + i)}))
                    .field("boolean_value", true)
                    .field("byte_value", i)
                    .field("short_value", i)
                    .field("integer_value", i)
                    .field("long_value", i)
                    .field("float_value", 0.1 * i)
                    .field("double_value", 0.1 * i)
                    .endObject());
            builders.add(builder);
        }
        Collections.shuffle(builders, random);
        for (IndexRequestBuilder builder : builders) {
            builder.execute().actionGet();
            if (random.nextBoolean()) {
                if (random.nextInt(5) != 0) {
                    refresh();
                } else {
                    client().admin().indices().prepareFlush().execute().actionGet();
                }
            }
        }
        refresh();

        // STRING script
        int size = 1 + random.nextInt(10);

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setSize(size)
                .addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), ScriptSortType.STRING)).execute().actionGet();
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));
            assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[] { (char) (97 + i),
                    (char) (97 + i) })));
        }
        size = 1 + random.nextInt(10);
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).execute()
                .actionGet();

        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));
            assertThat(searchResponse.getHits().getAt(i).sortValues()[0].toString(), equalTo(new String(new char[] { (char) (97 + (9 - i)),
                    (char) (97 + (9 - i)) })));
        }

        assertThat(searchResponse.toString(), not(containsString("error")));

        assertNoFailures(searchResponse);
    }

    public void testSortMinValueScript() throws IOException {
        String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("lvalue").field("type", "long").endObject()
                .startObject("dvalue").field("type", "double").endObject()
                .startObject("svalue").field("type", "keyword").endObject()
                .startObject("gvalue").field("type", "geo_point").endObject()
                .endObject().endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            IndexRequestBuilder req = client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject()
                    .field("ord", i)
                    .field("svalue", new String[]{"" + i, "" + (i + 1), "" + (i + 2)})
                    .field("lvalue", new long[]{i, i + 1, i + 2})
                    .field("dvalue", new double[]{i, i + 1, i + 2})
                    .startObject("gvalue")
                        .field("lat", (double) i + 1)
                        .field("lon", (double) i)
                    .endObject()
                    .endObject());
            req.execute().actionGet();
        }

        for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields
            client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject()
                    .field("ord", i)
                    .endObject()).execute().actionGet();
        }
        client().admin().indices().prepareRefresh("test").execute().actionGet();

        // test the long values
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("min", new Script("retval = Long.MAX_VALUE; for (v in doc['lvalue'].values){ retval = min(v, retval) }; retval"))
                .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
        for (int i = 0; i < 10; i++) {
            assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(),
                    (Long) searchResponse.getHits().getAt(i).field("min").value(), equalTo((long) i));
        }
        // test the double values
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("min", new Script("retval = Double.MAX_VALUE; for (v in doc['dvalue'].values){ retval = min(v, retval) }; retval"))
                .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
        for (int i = 0; i < 10; i++) {
            assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(),
                    (Double) searchResponse.getHits().getAt(i).field("min").value(), equalTo((double) i));
        }

        // test the string values
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("min", new Script("retval = Integer.MAX_VALUE; for (v in doc['svalue'].values){ retval = min(Integer.parseInt(v), retval) }; retval"))
                .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
        for (int i = 0; i < 10; i++) {
            assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(),
                    (Integer) searchResponse.getHits().getAt(i).field("min").value(), equalTo(i));
        }

        // test the geopoint values
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("min", new Script("retval = Double.MAX_VALUE; for (v in doc['gvalue'].values){ retval = min(v.lon, retval) }; retval"))
                .addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")).setSize(10)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
        for (int i = 0; i < 10; i++) {
            assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(),
                    (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, GeoUtils.TOLERANCE));
        }
    }

    public void testDocumentsWithNullValue() throws Exception {
        // TODO: sort shouldn't fail when sort field is mapped dynamically
        // We have to specify the mapping explicitly because, by the time the search is performed, dynamic mapping
        // might not have propagated to all nodes yet, and the sort fails when the sort field is not defined.
        String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
                .startObject("id").field("type", "keyword").endObject()
                .startObject("svalue").field("type", "keyword").endObject()
                .endObject().endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();

        client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
                .field("id", "1")
                .field("svalue", "aaa")
                .endObject()).execute().actionGet();

        client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
                .field("id", "2")
                .nullField("svalue")
                .endObject()).execute().actionGet();

        client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
                .field("id", "3")
                .field("svalue", "bbb")
                .endObject()).execute().actionGet();

        flush();
        refresh();

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("id", new Script("doc['id'].value"))
                .addSort("svalue", SortOrder.ASC)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
        assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3"));
        assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("id", new Script("doc['id'].values[0]"))
                .addSort("svalue", SortOrder.ASC)
                .execute().actionGet();

        assertNoFailures(searchResponse);

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
        assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("3"));
        assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addScriptField("id", new Script("doc['id'].value"))
                .addSort("svalue", SortOrder.DESC)
                .execute().actionGet();

        if (searchResponse.getFailedShards() > 0) {
            logger.warn("Failed shards:");
            for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
                logger.warn("-> {}", shardSearchFailure);
            }
        }
        assertThat(searchResponse.getFailedShards(), equalTo(0));

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
        assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("3"));
        assertThat(searchResponse.getHits().getAt(1).field("id").value(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));

        // a query with docs just with null values
        searchResponse = client().prepareSearch()
                .setQuery(termQuery("id", "2"))
                .addScriptField("id", new Script("doc['id'].value"))
                .addSort("svalue", SortOrder.DESC)
                .execute().actionGet();

        if (searchResponse.getFailedShards() > 0) {
            logger.warn("Failed shards:");
            for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
                logger.warn("-> {}", shardSearchFailure);
            }
        }
        assertThat(searchResponse.getFailedShards(), equalTo(0));

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
        assertThat(searchResponse.getHits().getAt(0).field("id").value(), equalTo("2"));
    }

    public void test2920() throws IOException {
        assertAcked(prepareCreate("test").addMapping(
                "test",
                jsonBuilder().startObject().startObject("test").startObject("properties").startObject("value").field("type", "keyword")
                        .endObject().endObject().endObject().endObject()));
        ensureGreen();
        for (int i = 0; i < 10; i++) {
            client().prepareIndex("test", "test", Integer.toString(i))
                    .setSource(jsonBuilder().startObject().field("value", "" + i).endObject()).execute().actionGet();
        }
        refresh();
        // "\u0027\u0027" unescapes to the Groovy source '' (an empty string literal), so every doc sorts with the same key
        SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery())
                .addSort(SortBuilders.scriptSort(new Script("\u0027\u0027"), ScriptSortType.STRING)).setSize(10).execute().actionGet();
        assertNoFailures(searchResponse);
    }
}
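The three min scripts in testSortMinValueScript above all perform the same fold over a document's values. Restated in plain Java as a semantic sketch (the class and parameter names are illustrative; "values" stands in for doc['lvalue'].values):

final class MinScriptSemantics {
    static long minOf(long[] values) {
        long retval = Long.MAX_VALUE;     // same seed as the Groovy script
        for (long v : values) {
            retval = Math.min(v, retval); // fold: keep the smallest value seen so far
        }
        return retval;                    // docs without values keep the seed
    }
}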
@ -1,75 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * This package contains tests that use groovy to test what looks
 * to be unrelated functionality, or functionality that should be
 * tested with a mock instead. Instead of doing an epic battle
 * with these tests, they are temporarily moved here to the groovy
 * plugin's tests, but that is likely not where they belong. Please
 * help by cleaning them up and we can remove this package!
 *
 * <ul>
 * <li>If the test is actually testing groovy specifically, move to
 * the org.elasticsearch.script.groovy tests package of this plugin</li>
 * <li>If the test is testing scripting integration with another core subsystem,
 * fix it to use a mock instead, so it can be in the core tests again
 * (a sketch of this option follows this file)</li>
 * <li>If the test is just being lazy, and does not really need scripting to test
 * something, clean it up!</li>
 * </ul>
 */
/* List of renames that took place:
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/AvgTests.java
renamed: core/src/test/java/org/elasticsearch/document/BulkIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java
renamed: core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ChildQuerySearchTests.java
renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java
^^^^^ note: the methods from this test using mustache were moved to the mustache module under its messy tests package.
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateHistogramTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java
renamed: core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java
renamed: core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java
renamed: core/src/test/java/org/elasticsearch/script/IndexLookupIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java
renamed: core/src/test/java/org/elasticsearch/script/IndexedScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java
renamed: core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/InnerHitsTests.java
renamed: core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/PercolatorTests.java
renamed: core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java
renamed: core/src/test/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java
renamed: core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java
renamed: core/src/test/java/org/elasticsearch/search/stats/SearchStatsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchStatsTests.java
renamed: core/src/test/java/org/elasticsearch/search/timeout/SearchTimeoutIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchTimeoutTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SignificantTermsSignificanceScoreTests.java
renamed: core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleNestedTests.java
renamed: core/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SumTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java
renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java
renamed: core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptCompilationException.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java
renamed: core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
renamed: core/src/test/java/org/elasticsearch/script/GroovySecurityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/combine_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/combine_script.groovy
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/init_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/init_script.groovy
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/map_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/map_script.groovy
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/metrics/scripted/conf/scripts/reduce_script.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/reduce_script.groovy
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/bucket/config/scripts/significance_script_no_params.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_no_params.groovy
renamed: core/src/test/resources/org/elasticsearch/search/aggregations/bucket/config/scripts/significance_script_with_params.groovy -> plugins/lang-groovy/src/test/resources/org/elasticsearch/messy/tests/conf/scripts/significance_script_with_params.groovy
*/
package org.elasticsearch.messy.tests;
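The second list item above suggests swapping Groovy for a mock. A minimal sketch of what that can look like, assuming the test framework's MockScriptPlugin is available; the class name ExampleMockedScriptTests, CustomScriptPlugin, and the script name "my_script" are illustrative, not part of this change:

public class ExampleMockedScriptTests extends ESIntegTestCase {

    // Registers a plain-Java implementation under a script name, so no Groovy plugin is needed.
    public static class CustomScriptPlugin extends MockScriptPlugin {
        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.singletonMap("my_script", vars -> 1);
        }
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(CustomScriptPlugin.class); // instead of GroovyPlugin.class
    }
}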
@ -18,7 +18,7 @@
 */


package org.elasticsearch.messy.tests;
package org.elasticsearch.script.groovy;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.index.IndexRequestBuilder;
@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class IndexedScriptTests extends ESIntegTestCase {
public class GroovyIndexedScriptTests extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
@ -1 +0,0 @@
newaggregation = []; sum = 0;for (a in _agg) { sum += a}; newaggregation.add(sum); return newaggregation
@ -1 +0,0 @@
vars.multiplier = 3
@ -1 +0,0 @@
_agg.add(vars.multiplier)
@ -1 +0,0 @@
newaggregation = []; sum = 0;for (aggregation in _aggs) { for (a in aggregation) { sum += a} }; newaggregation.add(sum); return newaggregation
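For context, the four one-line Groovy files deleted above (init, map, combine, reduce) are the stages of a scripted_metric aggregation; per the rename list earlier, they back ScriptedMetricTests. A hedged sketch of how file-based scripts of this era get wired into a request from inside an ESIntegTestCase; the builder calls and the 4-arg Script constructor reflect the contemporaneous Java API and should be treated as assumptions:

// init_script:    vars.multiplier = 3                        (seed shared params)
// map_script:     _agg.add(vars.multiplier)                  (per doc, on each shard)
// combine_script: sums one shard's _agg list into a number
// reduce_script:  sums the per-shard numbers into the final value
SearchResponse response = client().prepareSearch("test")
        .addAggregation(AggregationBuilders.scriptedMetric("sum_of_multipliers")
                .initScript(new Script("init_script", ScriptType.FILE, "groovy", null))
                .mapScript(new Script("map_script", ScriptType.FILE, "groovy", null))
                .combineScript(new Script("combine_script", ScriptType.FILE, "groovy", null))
                .reduceScript(new Script("reduce_script", ScriptType.FILE, "groovy", null)))
        .get();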
@ -1 +0,0 @@
// a script_heuristic body: the _subset/_superset variables are provided by the significant_terms aggregation
return _subset_freq + _subset_size + _superset_freq + _superset_size