Merge branch 'master' into close-index-api-refactoring

commit cd3a1af77b
@@ -163,8 +163,8 @@ task verifyVersions {
  * the enabled state of every bwc task. It should be set back to true
  * after the backport of the backcompat code is complete.
  */
-final boolean bwc_tests_enabled = true
-final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
+final boolean bwc_tests_enabled = false
+final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/36555" /* place a PR link here when committing bwc changes */
 if (bwc_tests_enabled == false) {
   if (bwc_tests_disabled_issue.isEmpty()) {
     throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
@@ -35,17 +35,36 @@ public final class InvalidateTokenRequest implements Validatable, ToXContentObje
 
     private final String accessToken;
     private final String refreshToken;
+    private final String realmName;
+    private final String username;
 
     InvalidateTokenRequest(@Nullable String accessToken, @Nullable String refreshToken) {
-        if (Strings.isNullOrEmpty(accessToken)) {
-            if (Strings.isNullOrEmpty(refreshToken)) {
-                throw new IllegalArgumentException("Either access-token or refresh-token is required");
-            }
-        } else if (Strings.isNullOrEmpty(refreshToken) == false) {
-            throw new IllegalArgumentException("Cannot supply both access-token and refresh-token");
-        }
+        this(accessToken, refreshToken, null, null);
+    }
+
+    public InvalidateTokenRequest(@Nullable String accessToken, @Nullable String refreshToken,
+                                  @Nullable String realmName, @Nullable String username) {
+        if (Strings.hasText(realmName) || Strings.hasText(username)) {
+            if (Strings.hasText(accessToken)) {
+                throw new IllegalArgumentException("access token is not allowed when realm name or username are specified");
+            }
+            if (refreshToken != null) {
+                throw new IllegalArgumentException("refresh token is not allowed when realm name or username are specified");
+            }
+        } else {
+            if (Strings.isNullOrEmpty(accessToken)) {
+                if (Strings.isNullOrEmpty(refreshToken)) {
+                    throw new IllegalArgumentException("Either access token or refresh token is required when neither realm name or " +
+                        "username are specified");
+                }
+            } else if (Strings.isNullOrEmpty(refreshToken) == false) {
+                throw new IllegalArgumentException("Cannot supply both access token and refresh token");
+            }
+        }
         this.accessToken = accessToken;
         this.refreshToken = refreshToken;
+        this.realmName = realmName;
+        this.username = username;
     }
 
     public static InvalidateTokenRequest accessToken(String accessToken) {
@@ -62,6 +81,20 @@ public final class InvalidateTokenRequest implements Validatable, ToXContentObje
         return new InvalidateTokenRequest(null, refreshToken);
     }
 
+    public static InvalidateTokenRequest realmTokens(String realmName) {
+        if (Strings.isNullOrEmpty(realmName)) {
+            throw new IllegalArgumentException("realm name is required");
+        }
+        return new InvalidateTokenRequest(null, null, realmName, null);
+    }
+
+    public static InvalidateTokenRequest userTokens(String username) {
+        if (Strings.isNullOrEmpty(username)) {
+            throw new IllegalArgumentException("username is required");
+        }
+        return new InvalidateTokenRequest(null, null, null, username);
+    }
+
     public String getAccessToken() {
         return accessToken;
     }
@@ -70,6 +103,14 @@ public final class InvalidateTokenRequest implements Validatable, ToXContentObje
         return refreshToken;
     }
 
+    public String getRealmName() {
+        return realmName;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
@@ -79,24 +120,28 @@ public final class InvalidateTokenRequest implements Validatable, ToXContentObje
         if (refreshToken != null) {
             builder.field("refresh_token", refreshToken);
         }
+        if (realmName != null) {
+            builder.field("realm_name", realmName);
+        }
+        if (username != null) {
+            builder.field("username", username);
+        }
         return builder.endObject();
     }
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
-        final InvalidateTokenRequest that = (InvalidateTokenRequest) o;
-        return Objects.equals(this.accessToken, that.accessToken) &&
-            Objects.equals(this.refreshToken, that.refreshToken);
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        InvalidateTokenRequest that = (InvalidateTokenRequest) o;
+        return Objects.equals(accessToken, that.accessToken) &&
+            Objects.equals(refreshToken, that.refreshToken) &&
+            Objects.equals(realmName, that.realmName) &&
+            Objects.equals(username, that.username);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(accessToken, refreshToken);
+        return Objects.hash(accessToken, refreshToken, realmName, username);
     }
 }
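Note on the API shape above: the request now supports four mutually exclusive invalidation modes, and the
constructor rejects any mix of them up front. A minimal usage sketch — the token, realm, and user values are
illustrative, and the refreshToken(String) factory is the pre-existing one the second hunk's context implies:

    // one static factory per invalidation mode
    InvalidateTokenRequest byAccessToken  = InvalidateTokenRequest.accessToken("<access token>");
    InvalidateTokenRequest byRefreshToken = InvalidateTokenRequest.refreshToken("<refresh token>");
    InvalidateTokenRequest byRealm        = InvalidateTokenRequest.realmTokens("native1");
    InvalidateTokenRequest byUser         = InvalidateTokenRequest.userTokens("myuser");

    // mixing modes fails fast, e.g. a token plus a realm name:
    // new InvalidateTokenRequest("<access token>", null, "native1", null)
    // -> IllegalArgumentException("access token is not allowed when realm name or username are specified")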
@@ -19,56 +19,107 @@
 
 package org.elasticsearch.client.security;
 
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 /**
- * Response when invalidating an OAuth2 token. Returns a
- * single boolean field for whether the invalidation record was created or updated.
+ * Response when invalidating one or multiple OAuth2 access tokens and refresh tokens. Returns
+ * information concerning how many tokens were invalidated, how many of the tokens that
+ * were attempted to be invalidated were already invalid, and if there were any errors
+ * encountered.
  */
 public final class InvalidateTokenResponse {
 
-    private final boolean created;
+    public static final ParseField CREATED = new ParseField("created");
+    public static final ParseField INVALIDATED_TOKENS = new ParseField("invalidated_tokens");
+    public static final ParseField PREVIOUSLY_INVALIDATED_TOKENS = new ParseField("previously_invalidated_tokens");
+    public static final ParseField ERROR_COUNT = new ParseField("error_count");
+    public static final ParseField ERRORS = new ParseField("error_details");
 
-    public InvalidateTokenResponse(boolean created) {
+    private final boolean created;
+    private final int invalidatedTokens;
+    private final int previouslyInvalidatedTokens;
+    private List<ElasticsearchException> errors;
+
+    @SuppressWarnings("unchecked")
+    private static final ConstructingObjectParser<InvalidateTokenResponse, Void> PARSER = new ConstructingObjectParser<>(
+        "tokens_invalidation_result", true,
+        // we parse but do not use the count of errors as we implicitly have this in the size of the Exceptions list
+        args -> new InvalidateTokenResponse((boolean) args[0], (int) args[1], (int) args[2], (List<ElasticsearchException>) args[4]));
+
+    static {
+        PARSER.declareBoolean(constructorArg(), CREATED);
+        PARSER.declareInt(constructorArg(), INVALIDATED_TOKENS);
+        PARSER.declareInt(constructorArg(), PREVIOUSLY_INVALIDATED_TOKENS);
+        PARSER.declareInt(constructorArg(), ERROR_COUNT);
+        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), ERRORS);
+    }
+
+    public InvalidateTokenResponse(boolean created, int invalidatedTokens, int previouslyInvalidatedTokens,
+                                   @Nullable List<ElasticsearchException> errors) {
         this.created = created;
+        this.invalidatedTokens = invalidatedTokens;
+        this.previouslyInvalidatedTokens = previouslyInvalidatedTokens;
+        if (null == errors) {
+            this.errors = Collections.emptyList();
+        } else {
+            this.errors = Collections.unmodifiableList(errors);
+        }
     }
 
     public boolean isCreated() {
         return created;
     }
 
+    public int getInvalidatedTokens() {
+        return invalidatedTokens;
+    }
+
+    public int getPreviouslyInvalidatedTokens() {
+        return previouslyInvalidatedTokens;
+    }
+
+    public List<ElasticsearchException> getErrors() {
+        return errors;
+    }
+
+    public int getErrorsCount() {
+        return errors == null ? 0 : errors.size();
+    }
+
     @Override
     public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
         InvalidateTokenResponse that = (InvalidateTokenResponse) o;
-        return created == that.created;
+        return created == that.created &&
+            invalidatedTokens == that.invalidatedTokens &&
+            previouslyInvalidatedTokens == that.previouslyInvalidatedTokens &&
+            Objects.equals(errors, that.errors);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(created);
-    }
-
-    private static final ConstructingObjectParser<InvalidateTokenResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "invalidate_token_response", true, args -> new InvalidateTokenResponse((boolean) args[0]));
-
-    static {
-        PARSER.declareBoolean(constructorArg(), new ParseField("created"));
+        return Objects.hash(created, invalidatedTokens, previouslyInvalidatedTokens, errors);
     }
 
     public static InvalidateTokenResponse fromXContent(XContentParser parser) throws IOException {
         if (parser.currentToken() == null) {
             parser.nextToken();
         }
         XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
         return PARSER.parse(parser, null);
     }
 }
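A sketch of what the new parser consumes. The JSON field names come from the ParseFields in the diff above;
the parser bootstrap around it is illustrative boilerplate (inside a test method declaring throws IOException),
not part of this PR:

    String json = "{\"invalidated_tokens\":2,\"previously_invalidated_tokens\":2," +
        "\"error_count\":0,\"created\":false}";
    try (XContentParser parser = XContentType.JSON.xContent().createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        InvalidateTokenResponse response = InvalidateTokenResponse.fromXContent(parser);
        assertEquals(2, response.getInvalidatedTokens());
        assertEquals(2, response.getPreviouslyInvalidatedTokens());
        assertTrue(response.getErrors().isEmpty()); // error_details omitted -> empty list, not null
    }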
@@ -240,12 +240,12 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
         for (int i = 1; i <= numDocs; i++) {
             if (randomBoolean()) {
                 testDocs++;
-                processor.add(new IndexRequest("test", "_doc", Integer.toString(testDocs))
+                processor.add(new IndexRequest("test").id(Integer.toString(testDocs))
                     .source(XContentType.JSON, "field", "value"));
                 multiGetRequest.add("test", Integer.toString(testDocs));
             } else {
                 testReadOnlyDocs++;
-                processor.add(new IndexRequest("test-ro", "_doc", Integer.toString(testReadOnlyDocs))
+                processor.add(new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs))
                     .source(XContentType.JSON, "field", "value"));
             }
         }
@@ -300,7 +300,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
 
             processor.add(new IndexRequest() // <1>
                 .source(XContentType.JSON, "user", "some user"));
-            processor.add(new IndexRequest("blogs", "post_type", "1") // <2>
+            processor.add(new IndexRequest("blogs").id("1") // <2>
                 .source(XContentType.JSON, "title", "some title"));
         }
         // end::bulk-processor-mix-parameters
@@ -364,7 +364,7 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
         MultiGetRequest multiGetRequest = new MultiGetRequest();
         for (int i = 1; i <= numDocs; i++) {
             if (randomBoolean()) {
-                processor.add(new IndexRequest(localIndex, "_doc", Integer.toString(i))
+                processor.add(new IndexRequest(localIndex).id(Integer.toString(i))
                     .source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30)));
             } else {
                 BytesArray data = bytesBulkRequest(localIndex, "_doc", i);
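The mechanical change running through these test diffs is the move off the deprecated (index, type, id)
IndexRequest constructor. The two forms below are equivalent for a _doc-typed index; the second is the
typeless style the PR converts to (index and field names illustrative):

    // before: mapping type passed in the constructor (deprecated in 7.0)
    IndexRequest typed = new IndexRequest("test", "_doc", "1")
        .source(XContentType.JSON, "field", "value");

    // after: typeless request, id supplied via a chained setter
    IndexRequest typeless = new IndexRequest("test").id("1")
        .source(XContentType.JSON, "field", "value");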
@@ -48,9 +48,9 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
         createFieldAddingPipleine("xyz", "fieldNameXYZ", "valueXYZ");
 
         BulkRequest request = new BulkRequest();
-        request.add(new IndexRequest("test", "doc", "1")
+        request.add(new IndexRequest("test").id("1")
            .source(XContentType.JSON, "field", "bulk1"));
-        request.add(new IndexRequest("test", "doc", "2")
+        request.add(new IndexRequest("test").id("2")
            .source(XContentType.JSON, "field", "bulk2"));
         request.pipeline("xyz");
 
@@ -67,10 +67,10 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
 
         BulkRequest request = new BulkRequest();
         request.pipeline("globalId");
-        request.add(new IndexRequest("test", "doc", "1")
+        request.add(new IndexRequest("test").id("1")
            .source(XContentType.JSON, "field", "bulk1")
            .setPipeline("perIndexId"));
-        request.add(new IndexRequest("test", "doc", "2")
+        request.add(new IndexRequest("test").id("2")
            .source(XContentType.JSON, "field", "bulk2")
            .setPipeline("perIndexId"));
 
@@ -91,11 +91,11 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
         BulkRequest request = new BulkRequest();
         request.pipeline("globalId");
 
-        request.add(new IndexRequest("test", "doc", "1")
+        request.add(new IndexRequest("test").id("1")
            .source(XContentType.JSON, "field", "bulk1")
            .setPipeline("perIndexId")); // <1>
 
-        request.add(new IndexRequest("test", "doc", "2")
+        request.add(new IndexRequest("test").id("2")
            .source(XContentType.JSON, "field", "bulk2")); // <2>
         // end::bulk-request-mix-pipeline
         bulk(request);
@@ -110,9 +110,9 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
 
     public void testGlobalIndex() throws IOException {
         BulkRequest request = new BulkRequest("global_index", null);
-        request.add(new IndexRequest().type("doc").id("1")
+        request.add(new IndexRequest().id("1")
            .source(XContentType.JSON, "field", "bulk1"));
-        request.add(new IndexRequest().type("doc").id("2")
+        request.add(new IndexRequest().id("2")
            .source(XContentType.JSON, "field", "bulk2"));
 
         bulk(request);
@@ -124,9 +124,9 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
     @SuppressWarnings("unchecked")
     public void testIndexGlobalAndPerRequest() throws IOException {
         BulkRequest request = new BulkRequest("global_index", null);
-        request.add(new IndexRequest("local_index", "doc", "1")
+        request.add(new IndexRequest("local_index").id("1")
            .source(XContentType.JSON, "field", "bulk1"));
-        request.add(new IndexRequest().type("doc").id("2") // will take global index
+        request.add(new IndexRequest().id("2") // will take global index
            .source(XContentType.JSON, "field", "bulk2"));
 
         bulk(request);
@@ -140,7 +140,7 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
     }
 
     public void testGlobalType() throws IOException {
-        BulkRequest request = new BulkRequest(null, "global_type");
+        BulkRequest request = new BulkRequest(null, "_doc");
         request.add(new IndexRequest("index").id("1")
            .source(XContentType.JSON, "field", "bulk1"));
         request.add(new IndexRequest("index").id("2")
@@ -149,10 +149,11 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
         bulk(request);
 
         Iterable<SearchHit> hits = searchAll("index");
-        assertThat(hits, everyItem(hasType("global_type")));
+        assertThat(hits, everyItem(hasType("_doc")));
     }
 
     @SuppressWarnings("unchecked")
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36549")
     public void testTypeGlobalAndPerRequest() throws IOException {
         BulkRequest request = new BulkRequest(null, "global_type");
         request.add(new IndexRequest("index1", "local_type", "1")
@@ -174,9 +175,9 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
     public void testGlobalRouting() throws IOException {
         createIndexWithMultipleShards("index");
         BulkRequest request = new BulkRequest(null, null);
-        request.add(new IndexRequest("index", "type", "1")
+        request.add(new IndexRequest("index").id("1")
            .source(XContentType.JSON, "field", "bulk1"));
-        request.add(new IndexRequest("index", "type", "2")
+        request.add(new IndexRequest("index").id("2")
            .source(XContentType.JSON, "field", "bulk1"));
         request.routing("1");
         bulk(request);
@@ -192,9 +193,9 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
     public void testMixLocalAndGlobalRouting() throws IOException {
         BulkRequest request = new BulkRequest(null, null);
         request.routing("globalRouting");
-        request.add(new IndexRequest("index", "type", "1")
+        request.add(new IndexRequest("index").id("1")
            .source(XContentType.JSON, "field", "bulk1"));
-        request.add(new IndexRequest("index", "type", "2")
+        request.add(new IndexRequest("index").id("2")
            .routing("localRouting")
            .source(XContentType.JSON, "field", "bulk1"));
 
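These tests pin down BulkRequest's global-parameter precedence: a pipeline, index, or routing set on the
bulk request applies to every sub-request that does not set its own, and a per-request value always wins.
A condensed sketch (index and pipeline ids illustrative):

    BulkRequest bulk = new BulkRequest("global_index", null); // global index, no global type
    bulk.pipeline("globalId");
    bulk.add(new IndexRequest().id("1")
        .source(XContentType.JSON, "field", "a"));   // inherits global_index and globalId
    bulk.add(new IndexRequest("local_index").id("2")
        .source(XContentType.JSON, "field", "b")
        .setPipeline("perIndexId"));                 // local index and pipeline win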
@@ -69,6 +69,7 @@ import org.elasticsearch.rest.action.document.RestDeleteAction;
 import org.elasticsearch.rest.action.document.RestGetAction;
 import org.elasticsearch.rest.action.document.RestMultiGetAction;
 import org.elasticsearch.rest.action.document.RestUpdateAction;
+import org.elasticsearch.rest.action.document.RestIndexAction;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -101,7 +102,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             // Testing deletion
             String docId = "id";
             highLevelClient().index(
-                new IndexRequest("index", "_doc", docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
+                new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
             DeleteRequest deleteRequest = new DeleteRequest("index", docId);
             if (randomBoolean()) {
                 deleteRequest.version(1L);
@@ -126,7 +127,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             // Testing version conflict
             String docId = "version_conflict";
             highLevelClient().index(
-                new IndexRequest("index", "_doc", docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
+                new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
             DeleteRequest deleteRequest = new DeleteRequest("index", docId).version(2);
             ElasticsearchException exception = expectThrows(ElasticsearchException.class,
                 () -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync));
@@ -139,7 +140,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             // Testing version type
             String docId = "version_type";
             highLevelClient().index(
-                new IndexRequest("index", "_doc", docId).source(Collections.singletonMap("foo", "bar"))
+                new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar"))
                     .versionType(VersionType.EXTERNAL).version(12), RequestOptions.DEFAULT);
             DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(13);
             DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
@@ -152,7 +153,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             // Testing version type with a wrong version
             String docId = "wrong_version";
             highLevelClient().index(
-                new IndexRequest("index", "_doc", docId).source(Collections.singletonMap("foo", "bar"))
+                new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar"))
                     .versionType(VersionType.EXTERNAL).version(12), RequestOptions.DEFAULT);
             ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
                 DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(10);
@@ -166,7 +167,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         {
             // Testing routing
             String docId = "routing";
-            highLevelClient().index(new IndexRequest("index", "_doc", docId).source(Collections.singletonMap("foo", "bar")).routing("foo"),
+            highLevelClient().index(new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")).routing("foo"),
                     RequestOptions.DEFAULT);
             DeleteRequest deleteRequest = new DeleteRequest("index", docId).routing("foo");
             DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
@@ -179,8 +180,13 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
 
     public void testDeleteWithTypes() throws IOException {
         String docId = "id";
-        highLevelClient().index(new IndexRequest("index", "type", docId)
-            .source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
+        IndexRequest indexRequest = new IndexRequest("index", "type", docId);
+        indexRequest.source(Collections.singletonMap("foo", "bar"));
+        execute(indexRequest,
+            highLevelClient()::index,
+            highLevelClient()::indexAsync,
+            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+        );
 
         DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
         DeleteResponse deleteResponse = execute(deleteRequest,
@@ -199,7 +205,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             GetRequest getRequest = new GetRequest("index", "id");
             assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
         }
-        IndexRequest index = new IndexRequest("index", "_doc", "id");
+        IndexRequest index = new IndexRequest("index").id("id");
         index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON);
         index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
         highLevelClient().index(index, RequestOptions.DEFAULT);
@@ -222,7 +228,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             GetRequest getRequest = new GetRequest("index", "id");
             assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
         }
-        IndexRequest index = new IndexRequest("index", "_doc", "id");
+        IndexRequest index = new IndexRequest("index").id("id");
         index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON);
         index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
         highLevelClient().index(index, RequestOptions.DEFAULT);
@@ -256,9 +262,9 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             RestStatus.OK,
             highLevelClient().bulk(
                 new BulkRequest()
-                    .add(new IndexRequest(noSourceIndex, "_doc", "1")
+                    .add(new IndexRequest(noSourceIndex).id("1")
                         .source(Collections.singletonMap("foo", 1), XContentType.JSON))
-                    .add(new IndexRequest(noSourceIndex, "_doc", "2")
+                    .add(new IndexRequest(noSourceIndex).id("2")
                        .source(Collections.singletonMap("foo", 2), XContentType.JSON))
                     .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
                 RequestOptions.DEFAULT
@@ -281,7 +287,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage());
             assertEquals("index", exception.getMetadata("es.index").get(0));
         }
-        IndexRequest index = new IndexRequest("index", "_doc", "id");
+        IndexRequest index = new IndexRequest("index").id("id");
         String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
         index.source(document, XContentType.JSON);
         index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
@@ -354,10 +360,14 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
 
     public void testGetWithTypes() throws IOException {
         String document = "{\"field\":\"value\"}";
-        IndexRequest index = new IndexRequest("index", "type", "id");
-        index.source(document, XContentType.JSON);
-        index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
-        highLevelClient().index(index, RequestOptions.DEFAULT);
+        IndexRequest indexRequest = new IndexRequest("index", "type", "id");
+        indexRequest.source(document, XContentType.JSON);
+        indexRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
+        execute(indexRequest,
+            highLevelClient()::index,
+            highLevelClient()::indexAsync,
+            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+        );
 
         GetRequest getRequest = new GetRequest("index", "type", "id");
         GetResponse getResponse = execute(getRequest,
@@ -401,10 +411,10 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
         BulkRequest bulk = new BulkRequest();
         bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
-        IndexRequest index = new IndexRequest("index", "_doc", "id1");
+        IndexRequest index = new IndexRequest("index").id("id1");
         index.source("{\"field\":\"value1\"}", XContentType.JSON);
         bulk.add(index);
-        index = new IndexRequest("index", "_doc", "id2");
+        index = new IndexRequest("index").id("id2");
         index.source("{\"field\":\"value2\"}", XContentType.JSON);
         bulk.add(index);
         highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
@@ -464,7 +474,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
     public void testIndex() throws IOException {
         final XContentType xContentType = randomFrom(XContentType.values());
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc");
+            IndexRequest indexRequest = new IndexRequest("index");
             indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", "test").endObject());
 
             IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
@@ -485,7 +495,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertTrue(indexResponse.getShardInfo().getTotal() > 0);
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "id");
+            IndexRequest indexRequest = new IndexRequest("index").id("id");
             indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 1).endObject());
 
             IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
@@ -495,7 +505,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals("id", indexResponse.getId());
             assertEquals(1L, indexResponse.getVersion());
 
-            indexRequest = new IndexRequest("index", "_doc", "id");
+            indexRequest = new IndexRequest("index").id("id");
             indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 2).endObject());
 
             indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
@@ -506,7 +516,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals(2L, indexResponse.getVersion());
 
             ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
-                IndexRequest wrongRequest = new IndexRequest("index", "_doc", "id");
+                IndexRequest wrongRequest = new IndexRequest("index").id("id");
                 wrongRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
                 wrongRequest.version(5L);
 
@@ -519,7 +529,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
         {
             ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
-                IndexRequest indexRequest = new IndexRequest("index", "_doc", "missing_pipeline");
+                IndexRequest indexRequest = new IndexRequest("index").id("missing_pipeline");
                 indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
                 indexRequest.setPipeline("missing");
 
@@ -531,7 +541,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
                 "reason=pipeline with id [missing] does not exist]", exception.getMessage());
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "external_version_type");
+            IndexRequest indexRequest = new IndexRequest("index").id("external_version_type");
             indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
             indexRequest.version(12L);
             indexRequest.versionType(VersionType.EXTERNAL);
@@ -544,7 +554,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertEquals(12L, indexResponse.getVersion());
         }
         {
-            final IndexRequest indexRequest = new IndexRequest("index", "_doc", "with_create_op_type");
+            final IndexRequest indexRequest = new IndexRequest("index").id("with_create_op_type");
             indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
             indexRequest.opType(DocWriteRequest.OpType.CREATE);
 
@@ -564,6 +574,22 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testIndexWithTypes() throws IOException {
+        final XContentType xContentType = randomFrom(XContentType.values());
+        IndexRequest indexRequest = new IndexRequest("index", "some_type", "some_id");
+        indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", "test").endObject());
+        IndexResponse indexResponse = execute(
+            indexRequest,
+            highLevelClient()::index,
+            highLevelClient()::indexAsync,
+            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+        );
+        assertEquals(RestStatus.CREATED, indexResponse.status());
+        assertEquals("index", indexResponse.getIndex());
+        assertEquals("some_type", indexResponse.getType());
+        assertEquals("some_id", indexResponse.getId());
+    }
+
     public void testUpdate() throws IOException {
         {
             UpdateRequest updateRequest = new UpdateRequest("index", "does_not_exist");
@@ -576,7 +602,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
                 exception.getMessage());
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "id");
+            IndexRequest indexRequest = new IndexRequest("index").id("id");
             indexRequest.source(singletonMap("field", "value"));
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(RestStatus.CREATED, indexResponse.status());
@@ -599,7 +625,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
                 "current version [2] is different than the one provided [1]]", exception.getMessage());
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "with_script");
+            IndexRequest indexRequest = new IndexRequest("index").id("with_script");
             indexRequest.source(singletonMap("counter", 12));
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(RestStatus.CREATED, indexResponse.status());
@@ -617,7 +643,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
 
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "with_doc");
+            IndexRequest indexRequest = new IndexRequest("index").id("with_doc");
             indexRequest.source("field_1", "one", "field_3", "three");
             indexRequest.version(12L);
             indexRequest.versionType(VersionType.EXTERNAL);
@@ -641,7 +667,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             assertFalse(sourceAsMap.containsKey("field_3"));
         }
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "noop");
+            IndexRequest indexRequest = new IndexRequest("index").id("noop");
             indexRequest.source("field", "value");
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(RestStatus.CREATED, indexResponse.status());
@@ -724,7 +750,11 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
     public void testUpdateWithTypes() throws IOException {
         IndexRequest indexRequest = new IndexRequest("index", "type", "id");
         indexRequest.source(singletonMap("field", "value"));
-        IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
+        IndexResponse indexResponse = execute(indexRequest,
+            highLevelClient()::index,
+            highLevelClient()::indexAsync,
+            expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
+        );
 
         UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
         updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
@@ -754,7 +784,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             if (erroneous == false) {
                 assertEquals(RestStatus.CREATED,
                     highLevelClient().index(
-                        new IndexRequest("index", "_doc", id).source("field", -1), RequestOptions.DEFAULT).status());
+                        new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
             }
             DeleteRequest deleteRequest = new DeleteRequest("index", id);
             bulkRequest.add(deleteRequest);
@@ -763,14 +793,14 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent())
                 .startObject().field("id", i).endObject());
             if (opType == DocWriteRequest.OpType.INDEX) {
-                IndexRequest indexRequest = new IndexRequest("index", "_doc", id).source(source, xContentType);
+                IndexRequest indexRequest = new IndexRequest("index").id(id).source(source, xContentType);
                 if (erroneous) {
                     indexRequest.version(12L);
                 }
                 bulkRequest.add(indexRequest);
 
             } else if (opType == DocWriteRequest.OpType.CREATE) {
-                IndexRequest createRequest = new IndexRequest("index", "_doc", id).source(source, xContentType).create(true);
+                IndexRequest createRequest = new IndexRequest("index").id(id).source(source, xContentType).create(true);
                 if (erroneous) {
                     assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest, RequestOptions.DEFAULT).status());
                 }
@@ -782,7 +812,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             if (erroneous == false) {
                 assertEquals(RestStatus.CREATED,
                     highLevelClient().index(
-                        new IndexRequest("index", "_doc", id).source("field", -1), RequestOptions.DEFAULT).status());
+                        new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
             }
             bulkRequest.add(updateRequest);
         }
@@ -832,9 +862,9 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             RestStatus.OK,
             highLevelClient().bulk(
                 new BulkRequest()
-                    .add(new IndexRequest(sourceIndex, "_doc", "1")
+                    .add(new IndexRequest(sourceIndex).id("1")
                        .source(Collections.singletonMap("foo", 1), XContentType.JSON))
-                    .add(new IndexRequest(sourceIndex, "_doc", "2")
+                    .add(new IndexRequest(sourceIndex).id("2")
                        .source(Collections.singletonMap("foo", 2), XContentType.JSON))
                     .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
                 RequestOptions.DEFAULT
@@ -942,11 +972,11 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             RestStatus.OK,
             highLevelClient().bulk(
                 new BulkRequest()
-                    .add(new IndexRequest(sourceIndex, "_doc", "1")
+                    .add(new IndexRequest(sourceIndex).id("1")
                        .source(Collections.singletonMap("foo", 1), XContentType.JSON))
-                    .add(new IndexRequest(sourceIndex, "_doc", "2")
+                    .add(new IndexRequest(sourceIndex).id("2")
                        .source(Collections.singletonMap("foo", 2), XContentType.JSON))
-                    .add(new IndexRequest(sourceIndex, "_doc", "3")
+                    .add(new IndexRequest(sourceIndex).id("3")
                        .source(Collections.singletonMap("foo", 3), XContentType.JSON))
                     .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
                 RequestOptions.DEFAULT
@@ -1065,21 +1095,21 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             if (erroneous == false) {
                 assertEquals(RestStatus.CREATED,
                     highLevelClient().index(
-                        new IndexRequest("index", "_doc", id).source("field", -1), RequestOptions.DEFAULT).status());
+                        new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
             }
             DeleteRequest deleteRequest = new DeleteRequest("index", id);
             processor.add(deleteRequest);
 
         } else {
             if (opType == DocWriteRequest.OpType.INDEX) {
-                IndexRequest indexRequest = new IndexRequest("index", "_doc", id).source(xContentType, "id", i);
+                IndexRequest indexRequest = new IndexRequest("index").id(id).source(xContentType, "id", i);
                 if (erroneous) {
                     indexRequest.version(12L);
                 }
                 processor.add(indexRequest);
 
            } else if (opType == DocWriteRequest.OpType.CREATE) {
-                IndexRequest createRequest = new IndexRequest("index", "_doc", id).source(xContentType, "id", i).create(true);
+                IndexRequest createRequest = new IndexRequest("index").id(id).source(xContentType, "id", i).create(true);
                 if (erroneous) {
                     assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest, RequestOptions.DEFAULT).status());
                 }
@@ -1091,7 +1121,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             if (erroneous == false) {
                 assertEquals(RestStatus.CREATED,
                     highLevelClient().index(
-                        new IndexRequest("index", "_doc", id).source("field", -1), RequestOptions.DEFAULT).status());
+                        new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
             }
             processor.add(updateRequest);
         }
@@ -1141,7 +1171,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         String expectedIndex = "logstash-" +
             DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy());
         {
-            IndexRequest indexRequest = new IndexRequest(indexPattern, "_doc", "id#1");
+            IndexRequest indexRequest = new IndexRequest(indexPattern).id("id#1");
             indexRequest.source("field", "value");
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals(expectedIndex, indexResponse.getIndex());
@@ -1159,7 +1189,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
 
         String docId = "this/is/the/id/中文";
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", docId);
+            IndexRequest indexRequest = new IndexRequest("index").id(docId);
             indexRequest.source("field", "value");
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
             assertEquals("index", indexResponse.getIndex());
@@ -1182,7 +1212,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
         //parameters are encoded by the low-level client but let's test that everything works the same when we use the high-level one
         String routing = "routing/中文value#1?";
         {
-            IndexRequest indexRequest = new IndexRequest("index", "_doc", "id");
+            IndexRequest indexRequest = new IndexRequest("index").id("id");
             indexRequest.source("field", "value");
             indexRequest.routing(routing);
             IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
@@ -1216,9 +1246,9 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             RestStatus.OK,
             highLevelClient().bulk(
                 new BulkRequest()
-                    .add(new IndexRequest(sourceIndex, "_doc", "1")
+                    .add(new IndexRequest(sourceIndex).id("1")
                        .source(Collections.singletonMap("field", "value1"), XContentType.JSON))
-                    .add(new IndexRequest(sourceIndex, "_doc", "2")
+                    .add(new IndexRequest(sourceIndex).id("2")
                        .source(Collections.singletonMap("field", "value2"), XContentType.JSON))
                     .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
                 RequestOptions.DEFAULT
@@ -1292,9 +1322,9 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
             RestStatus.OK,
             highLevelClient().bulk(
                 new BulkRequest()
-                    .add(new IndexRequest(sourceIndex, "_doc", "1")
+                    .add(new IndexRequest(sourceIndex).id("1")
                        .source(Collections.singletonMap("field", "value1"), XContentType.JSON))
-                    .add(new IndexRequest(sourceIndex, "_doc", "2")
+                    .add(new IndexRequest(sourceIndex).id("2")
                        .source(Collections.singletonMap("field", "value2"), XContentType.JSON))
                     .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
                 RequestOptions.DEFAULT
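Pattern worth calling out: the new *WithTypes tests keep the old typed code paths covered while asserting
that the server answers with a deprecation warning. expectWarnings(...) makes the request fail unless
exactly that Warning header comes back, so the tests document the deprecation as well as the behaviour.
The shape, as used above:

    IndexRequest typed = new IndexRequest("index", "type", "id");
    typed.source(Collections.singletonMap("foo", "bar"));
    execute(typed,
        highLevelClient()::index,
        highLevelClient()::indexAsync,
        expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));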
@@ -41,23 +41,23 @@ public class GraphIT extends ESRestHighLevelClientTestCase {
     @Before
     public void indexDocuments() throws IOException {
         // Create chain of doc IDs across indices 1->2->3
-        Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
+        Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
         doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
         client().performRequest(doc1);
 
-        Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/1");
+        Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/1");
         doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}");
         client().performRequest(doc2);
 
-        Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
+        Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/2");
         doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}");
         client().performRequest(doc3);
 
-        Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+        Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2");
         doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}");
         client().performRequest(doc4);
 
-        Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/type/2");
+        Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2");
         doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}");
         client().performRequest(doc5);
 
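Same migration at the low-level REST layer: document URLs drop the custom mapping type in favour of the
fixed _doc segment. In sketch form, taking the first document above:

    // before: PUT /index1/type/1
    // after:  PUT /index1/_doc/1
    Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
    doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
    client().performRequest(doc1);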
@@ -909,8 +909,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
             assertEquals("test_new", rolloverResponse.getNewIndex());
         }
 
-        highLevelClient().index(new IndexRequest("test", "type", "1").source("field", "value"), RequestOptions.DEFAULT);
-        highLevelClient().index(new IndexRequest("test", "type", "2").source("field", "value")
+        highLevelClient().index(new IndexRequest("test").id("1").source("field", "value"), RequestOptions.DEFAULT);
+        highLevelClient().index(new IndexRequest("test").id("2").source("field", "value")
            .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL), RequestOptions.DEFAULT);
         //without the refresh the rollover may not happen as the number of docs seen may be off
 
@@ -1303,7 +1303,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
         String index = "shakespeare";
 
         createIndex(index, Settings.EMPTY);
-        Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/1");
+        Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/_doc");
         postDoc.setJsonEntity(
            "{\"type\":\"act\",\"line_id\":1,\"play_name\":\"Henry IV\", \"speech_number\":\"\"," +
            "\"line_number\":\"\",\"speaker\":\"\",\"text_entry\":\"ACT I\"}");
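The WAIT_UNTIL refresh policy on the second write above matters: as the in-diff comment notes, _rollover
evaluates its conditions against the visible doc count, so the test blocks until the write is searchable
before rolling over. In isolation:

    highLevelClient().index(new IndexRequest("test").id("2")
        .source("field", "value")
        .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL), RequestOptions.DEFAULT);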
@@ -52,13 +52,13 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
 
     @Before
     public void indexDocuments() throws IOException {
-        Request berlin = new Request("PUT", "/index/doc/berlin");
+        Request berlin = new Request("PUT", "/index/_doc/berlin");
         berlin.setJsonEntity("{\"text\":\"berlin\"}");
         client().performRequest(berlin);
         for (int i = 0; i < 6; i++) {
             // add another index to test basic multi index support
             String index = i == 0 ? "index2" : "index";
-            Request amsterdam = new Request("PUT", "/" + index + "/doc/amsterdam" + i);
+            Request amsterdam = new Request("PUT", "/" + index + "/_doc/amsterdam" + i);
             amsterdam.setJsonEntity("{\"text\":\"amsterdam\"}");
             client().performRequest(amsterdam);
         }
@@ -608,8 +608,7 @@ public class RequestConvertersTests extends ESTestCase {
 
     public void testIndex() throws IOException {
         String index = randomAlphaOfLengthBetween(3, 10);
-        String type = randomAlphaOfLengthBetween(3, 10);
-        IndexRequest indexRequest = new IndexRequest(index, type);
+        IndexRequest indexRequest = new IndexRequest(index);
 
         String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null;
         indexRequest.id(id);
@@ -660,6 +659,50 @@ public class RequestConvertersTests extends ESTestCase {
             indexRequest.source(builder);
         }
 
+        Request request = RequestConverters.index(indexRequest);
+        if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
+            assertEquals("/" + index + "/_doc/" + id + "/_create", request.getEndpoint());
+        } else if (id != null) {
+            assertEquals("/" + index + "/_doc/" + id, request.getEndpoint());
+        } else {
+            assertEquals("/" + index + "/_doc", request.getEndpoint());
+        }
+        assertEquals(expectedParams, request.getParameters());
+        assertEquals(method, request.getMethod());
+
+        HttpEntity entity = request.getEntity();
+        assertTrue(entity instanceof ByteArrayEntity);
+        assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue());
+        try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
+            assertEquals(nbFields, parser.map().size());
+        }
+    }
+
+    public void testIndexWithType() throws IOException {
+        String index = randomAlphaOfLengthBetween(3, 10);
+        String type = randomAlphaOfLengthBetween(3, 10);
+        IndexRequest indexRequest = new IndexRequest(index, type);
+        String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null;
+        indexRequest.id(id);
+
+        String method = HttpPost.METHOD_NAME;
+        if (id != null) {
+            method = HttpPut.METHOD_NAME;
+            if (randomBoolean()) {
+                indexRequest.opType(DocWriteRequest.OpType.CREATE);
+            }
+        }
+        XContentType xContentType = randomFrom(XContentType.values());
+        int nbFields = randomIntBetween(0, 10);
+        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+            builder.startObject();
+            for (int i = 0; i < nbFields; i++) {
+                builder.field("field_" + i, i);
+            }
+            builder.endObject();
+            indexRequest.source(builder);
+        }
+
         Request request = RequestConverters.index(indexRequest);
         if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
             assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.getEndpoint());
@@ -668,7 +711,6 @@ public class RequestConvertersTests extends ESTestCase {
         } else {
             assertEquals("/" + index + "/" + type, request.getEndpoint());
         }
         assertEquals(expectedParams, request.getParameters());
         assertEquals(method, request.getMethod());
 
         HttpEntity entity = request.getEntity();
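Between them, testIndex (typeless) and testIndexWithType now pin down both endpoint schemes. Summarised
for an id of "1" (these paths follow directly from the assertions above):

    // typeless:           new IndexRequest("idx").id("1")         -> PUT  /idx/_doc/1
    // typeless, no id:    new IndexRequest("idx")                 -> POST /idx/_doc
    // typeless, CREATE:   indexRequest.opType(OpType.CREATE)      -> PUT  /idx/_doc/1/_create
    // typed (deprecated): new IndexRequest("idx", "type", "1")    -> PUT  /idx/type/1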
@@ -50,6 +50,7 @@ import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.join.aggregations.Children;
 import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.action.document.RestIndexAction;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
@@ -101,18 +102,23 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     public void indexDocuments() throws IOException {
         {
             Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
+            doc1.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc1.setJsonEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}");
             client().performRequest(doc1);
             Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
+            doc2.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc2.setJsonEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}");
             client().performRequest(doc2);
             Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
+            doc3.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc3.setJsonEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}");
             client().performRequest(doc3);
             Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
+            doc4.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc4.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
             client().performRequest(doc4);
             Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
+            doc5.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
             doc5.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
             client().performRequest(doc5);
         }
@@ -400,7 +406,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         createIndex.setJsonEntity(
             "{\n" +
             "    \"mappings\": {\n" +
-            "        \"qa\" : {\n" +
+            "        \"_doc\" : {\n" +
             "            \"properties\" : {\n" +
             "                \"qa_join_field\" : {\n" +
             "                    \"type\" : \"join\",\n" +
@@ -411,7 +417,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "    }" +
             "}");
         client().performRequest(createIndex);
-        Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1");
+        Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/1");
         questionDoc.setJsonEntity(
             "{\n" +
             "    \"body\": \"<p>I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" +
@@ -424,7 +430,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "    \"qa_join_field\" : \"question\"\n" +
             "}");
         client().performRequest(questionDoc);
-        Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2");
+        Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/2");
         answerDoc1.addParameter("routing", "1");
         answerDoc1.setJsonEntity(
             "{\n" +
@@ -441,7 +447,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "    \"creation_date\": \"2009-05-04T13:45:37.030\"\n" +
             "}");
         client().performRequest(answerDoc1);
-        Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3");
+        Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/3");
         answerDoc2.addParameter("routing", "1");
         answerDoc2.setJsonEntity(
             "{\n" +
@@ -535,7 +541,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     }
 
     public void testSearchWithWeirdScriptFields() throws Exception {
-        Request doc = new Request("PUT", "test/type/1");
+        Request doc = new Request("PUT", "test/_doc/1");
         doc.setJsonEntity("{\"field\":\"value\"}");
         client().performRequest(doc);
         client().performRequest(new Request("POST", "/test/_refresh"));
@@ -579,7 +585,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     public void testSearchScroll() throws Exception {
         for (int i = 0; i < 100; i++) {
             XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
-            Request doc = new Request(HttpPut.METHOD_NAME, "/test/type1/" + Integer.toString(i));
+            Request doc = new Request(HttpPut.METHOD_NAME, "/test/_doc/" + Integer.toString(i));
             doc.setJsonEntity(Strings.toString(builder));
             client().performRequest(doc);
         }
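One subtlety in the join-field test above: child documents must be indexed onto the parent's shard, hence
the routing=1 parameter on each answer. A sketch of the shape of such a child document (the join payload
here is illustrative; the test's full bodies are longer):

    Request answerDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/2");
    answerDoc.addParameter("routing", "1"); // route the answer to its parent question's shard
    answerDoc.setJsonEntity("{ \"qa_join_field\" : { \"name\" : \"answer\", \"parent\" : \"1\" } }");
    client().performRequest(answerDoc);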
@@ -82,8 +82,8 @@ public class TasksIT extends ESRestHighLevelClientTestCase {
         createIndex(sourceIndex, settings);
         createIndex(destinationIndex, settings);
         BulkRequest bulkRequest = new BulkRequest()
-            .add(new IndexRequest(sourceIndex, "type", "1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
-            .add(new IndexRequest(sourceIndex, "type", "2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
+            .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
+            .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
             .setRefreshPolicy(RefreshPolicy.IMMEDIATE);
         assertEquals(RestStatus.OK, highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT).status());
 
@@ -111,7 +111,7 @@ public class TasksIT extends ESRestHighLevelClientTestCase {
         }
         TaskInfo info = taskResponse.getTaskInfo();
         assertTrue(info.isCancellable());
-        assertEquals("reindex from [source1] to [dest]", info.getDescription());
+        assertEquals("reindex from [source1] to [dest][_doc]", info.getDescription());
         assertEquals("indices:data/write/reindex", info.getAction());
     }
 
@@ -116,8 +116,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         jsonMap.put("user", "kimchy");
         jsonMap.put("postDate", new Date());
         jsonMap.put("message", "trying out Elasticsearch");
-        IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1")
-                .source(jsonMap); // <1>
+        IndexRequest indexRequest = new IndexRequest("posts")
+            .id("1").source(jsonMap); // <1>
         //end::index-request-map
         IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
         assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
@@ -132,34 +132,33 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
             builder.field("message", "trying out Elasticsearch");
         }
         builder.endObject();
-        IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1")
-                .source(builder); // <1>
+        IndexRequest indexRequest = new IndexRequest("posts")
+            .id("1").source(builder); // <1>
         //end::index-request-xcontent
         IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
         assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult());
         }
         {
         //tag::index-request-shortcut
-        IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1")
-                .source("user", "kimchy",
-                    "postDate", new Date(),
-                    "message", "trying out Elasticsearch"); // <1>
+        IndexRequest indexRequest = new IndexRequest("posts")
+            .id("1")
+            .source("user", "kimchy",
+                "postDate", new Date(),
+                "message", "trying out Elasticsearch"); // <1>
         //end::index-request-shortcut
         IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
         assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult());
         }
         {
         //tag::index-request-string
-        IndexRequest request = new IndexRequest(
-                "posts", // <1>
-                "_doc", // <2>
-                "1"); // <3>
+        IndexRequest request = new IndexRequest("posts"); // <1>
+        request.id("1"); // <2>
         String jsonString = "{" +
             "\"user\":\"kimchy\"," +
             "\"postDate\":\"2013-01-30\"," +
             "\"message\":\"trying out Elasticsearch\"" +
             "}";
-        request.source(jsonString, XContentType.JSON); // <4>
+        request.source(jsonString, XContentType.JSON); // <3>
         //end::index-request-string
 
         // tag::index-execute
@@ -169,7 +168,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
 
         // tag::index-response
         String index = indexResponse.getIndex();
-        String type = indexResponse.getType();
         String id = indexResponse.getId();
         long version = indexResponse.getVersion();
         if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
@@ -190,7 +188,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         // end::index-response
         }
         {
-            IndexRequest request = new IndexRequest("posts", "_doc", "1");
+            IndexRequest request = new IndexRequest("posts").id("1");
             // tag::index-request-routing
             request.routing("routing"); // <1>
             // end::index-request-routing
@@ -218,9 +216,10 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             // tag::index-conflict
-            IndexRequest request = new IndexRequest("posts", "_doc", "1")
-                .source("field", "value")
-                .version(1);
+            IndexRequest request = new IndexRequest("posts")
+                .id("1")
+                .source("field", "value")
+                .version(1);
             try {
                 IndexResponse response = client.index(request, RequestOptions.DEFAULT);
             } catch(ElasticsearchException e) {
@@ -232,9 +231,10 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
         {
             // tag::index-optype
-            IndexRequest request = new IndexRequest("posts", "_doc", "1")
-                .source("field", "value")
-                .opType(DocWriteRequest.OpType.CREATE);
+            IndexRequest request = new IndexRequest("posts")
+                .id("1")
+                .source("field", "value")
+                .opType(DocWriteRequest.OpType.CREATE);
             try {
                 IndexResponse response = client.index(request, RequestOptions.DEFAULT);
|
||||
} catch(ElasticsearchException e) {
|
||||
|
@ -245,7 +245,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::index-optype
|
||||
}
|
||||
{
|
||||
IndexRequest request = new IndexRequest("posts", "_doc", "async").source("field", "value");
|
||||
IndexRequest request = new IndexRequest("posts")
|
||||
.id("async")
|
||||
.source("field", "value");
|
||||
ActionListener<IndexResponse> listener;
|
||||
// tag::index-execute-listener
|
||||
listener = new ActionListener<IndexResponse>() {
|
||||
|
@ -277,7 +279,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
public void testUpdate() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1").source("field", 0);
|
||||
IndexRequest indexRequest = new IndexRequest("posts").id("1").source("field", 0);
|
||||
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
|
||||
assertSame(RestStatus.CREATED, indexResponse.status());
|
||||
|
||||
|
@ -552,7 +554,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1").source("field", "value");
|
||||
IndexRequest indexRequest = new IndexRequest("posts").id("1").source("field", "value");
|
||||
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
|
||||
assertSame(RestStatus.CREATED, indexResponse.status());
|
||||
}
|
||||
|
@ -620,7 +622,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
|
||||
{
|
||||
IndexResponse indexResponse = client.index(new IndexRequest("posts", "_doc", "1").source("field", "value")
|
||||
IndexResponse indexResponse = client.index(new IndexRequest("posts").id("1").source("field", "value")
|
||||
, RequestOptions.DEFAULT);
|
||||
assertSame(RestStatus.CREATED, indexResponse.status());
|
||||
|
||||
|
@ -637,7 +639,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::delete-conflict
|
||||
}
|
||||
{
|
||||
IndexResponse indexResponse = client.index(new IndexRequest("posts", "_doc", "async").source("field", "value"),
|
||||
IndexResponse indexResponse = client.index(new IndexRequest("posts").id("async").source("field", "value"),
|
||||
RequestOptions.DEFAULT);
|
||||
assertSame(RestStatus.CREATED, indexResponse.status());
|
||||
|
||||
|
@ -676,11 +678,11 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
{
|
||||
// tag::bulk-request
|
||||
BulkRequest request = new BulkRequest(); // <1>
|
||||
request.add(new IndexRequest("posts", "_doc", "1") // <2>
|
||||
request.add(new IndexRequest("posts").id("1") // <2>
|
||||
.source(XContentType.JSON,"field", "foo"));
|
||||
request.add(new IndexRequest("posts", "_doc", "2") // <3>
|
||||
request.add(new IndexRequest("posts").id("2") // <3>
|
||||
.source(XContentType.JSON,"field", "bar"));
|
||||
request.add(new IndexRequest("posts", "_doc", "3") // <4>
|
||||
request.add(new IndexRequest("posts").id("3") // <4>
|
||||
.source(XContentType.JSON,"field", "baz"));
|
||||
// end::bulk-request
|
||||
// tag::bulk-execute
|
||||
|
@ -695,7 +697,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
request.add(new DeleteRequest("posts", "3")); // <1>
|
||||
request.add(new UpdateRequest("posts", "2") // <2>
|
||||
.doc(XContentType.JSON,"other", "test"));
|
||||
request.add(new IndexRequest("posts", "_doc", "4") // <3>
|
||||
request.add(new IndexRequest("posts").id("4") // <3>
|
||||
.source(XContentType.JSON,"field", "baz"));
|
||||
// end::bulk-request-with-mixed-operations
|
||||
BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
|
||||
|
@ -1244,7 +1246,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
Response response = client().performRequest(createIndex);
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
|
||||
IndexRequest indexRequest = new IndexRequest("posts", "_doc", "1")
|
||||
IndexRequest indexRequest = new IndexRequest("posts").id("1")
|
||||
.source("user", "kimchy",
|
||||
"postDate", new Date(),
|
||||
"message", "trying out Elasticsearch");
|
||||
|
@ -1472,13 +1474,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertNotNull(bulkProcessor);
|
||||
|
||||
// tag::bulk-processor-add
|
||||
IndexRequest one = new IndexRequest("posts", "_doc", "1").
|
||||
source(XContentType.JSON, "title",
|
||||
IndexRequest one = new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "title",
|
||||
"In which order are my Elasticsearch queries executed?");
|
||||
IndexRequest two = new IndexRequest("posts", "_doc", "2")
|
||||
IndexRequest two = new IndexRequest("posts").id("2")
|
||||
.source(XContentType.JSON, "title",
|
||||
"Current status and upcoming changes in Elasticsearch");
|
||||
IndexRequest three = new IndexRequest("posts", "_doc", "3")
|
||||
IndexRequest three = new IndexRequest("posts").id("3")
|
||||
.source(XContentType.JSON, "title",
|
||||
"The Future of Federated Search in Elasticsearch");
|
||||
|
||||
|
@ -1546,7 +1548,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
CreateIndexRequest authorsRequest = new CreateIndexRequest("authors").mapping("_doc", "user", "type=keyword");
|
||||
CreateIndexResponse authorsResponse = client.indices().create(authorsRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(authorsResponse.isAcknowledged());
|
||||
client.index(new IndexRequest("index", "_doc", "1").source("user", "kimchy"), RequestOptions.DEFAULT);
|
||||
client.index(new IndexRequest("index").id("1").source("user", "kimchy"), RequestOptions.DEFAULT);
|
||||
Response refreshResponse = client().performRequest(new Request("POST", "/authors/_refresh"));
|
||||
assertEquals(200, refreshResponse.getStatusLine().getStatusCode());
|
||||
|
||||
|
@ -1670,8 +1672,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
CreateIndexRequest authorsRequest = new CreateIndexRequest("authors").mapping("_doc", "user", "type=text");
|
||||
CreateIndexResponse authorsResponse = client.indices().create(authorsRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(authorsResponse.isAcknowledged());
|
||||
client.index(new IndexRequest("index", "_doc", "1").source("user", "kimchy"), RequestOptions.DEFAULT);
|
||||
client.index(new IndexRequest("index", "_doc", "2").source("user", "s1monw"), RequestOptions.DEFAULT);
|
||||
client.index(new IndexRequest("index").id("1").source("user", "kimchy"), RequestOptions.DEFAULT);
|
||||
client.index(new IndexRequest("index").id("2").source("user", "s1monw"), RequestOptions.DEFAULT);
|
||||
Response refreshResponse = client().performRequest(new Request("POST", "/authors/_refresh"));
|
||||
assertEquals(200, refreshResponse.getStatusLine().getStatusCode());
|
||||
|
||||
|
@ -1764,7 +1766,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
source.put("foo", "val1");
|
||||
source.put("bar", "val2");
|
||||
source.put("baz", "val3");
|
||||
client.index(new IndexRequest("index", "_doc", "example_id")
|
||||
client.index(new IndexRequest("index")
|
||||
.id("example_id")
|
||||
.source(source)
|
||||
.setRefreshPolicy(RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
|
||||
|
||||
|
|
|
@ -43,11 +43,11 @@ public class GraphDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
@Before
|
||||
public void indexDocuments() throws IOException {
|
||||
// Create chain of doc IDs across indices 1->2->3
|
||||
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/type/1");
|
||||
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
|
||||
doc1.setJsonEntity("{ \"participants\":[1,2], \"text\":\"let's start projectx\", \"attachment_md5\":\"324FHDGHFDG4564\"}");
|
||||
client().performRequest(doc1);
|
||||
|
||||
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/type/2");
|
||||
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/2");
|
||||
doc2.setJsonEntity("{\"participants\":[2,3,4], \"text\":\"got something you both may be interested in\"}");
|
||||
client().performRequest(doc2);
|
||||
|
||||
|
|
|
@ -82,7 +82,7 @@ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
//tag::migration-request-ctor
|
||||
IndexRequest request = new IndexRequest("index", "_doc", "id"); // <1>
|
||||
IndexRequest request = new IndexRequest("index").id("id"); // <1>
|
||||
request.source("{\"field\":\"value\"}", XContentType.JSON);
|
||||
//end::migration-request-ctor
|
||||
|
||||
|
|
|
@ -1497,13 +1497,13 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
|
||||
{
|
||||
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "_doc");
|
||||
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
|
||||
indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
|
||||
"\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON);
|
||||
bulkRequest.add(indexRequest);
|
||||
}
|
||||
{
|
||||
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "_doc");
|
||||
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
|
||||
indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
|
||||
"\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON);
|
||||
bulkRequest.add(indexRequest);
|
||||
|
|
|
@ -305,11 +305,11 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
BulkRequest request = new BulkRequest();
|
||||
request.add(new IndexRequest("posts", "_doc", "1")
|
||||
request.add(new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "company", "Elastic", "age", 20));
|
||||
request.add(new IndexRequest("posts", "_doc", "2")
|
||||
request.add(new IndexRequest("posts").id("2")
|
||||
.source(XContentType.JSON, "company", "Elastic", "age", 30));
|
||||
request.add(new IndexRequest("posts", "_doc", "3")
|
||||
request.add(new IndexRequest("posts").id("3")
|
||||
.source(XContentType.JSON, "company", "Elastic", "age", 40));
|
||||
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
|
||||
|
@ -381,10 +381,10 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
BulkRequest request = new BulkRequest();
|
||||
request.add(new IndexRequest("posts", "_doc", "1").source(XContentType.JSON, "user", "kimchy"));
|
||||
request.add(new IndexRequest("posts", "_doc", "2").source(XContentType.JSON, "user", "javanna"));
|
||||
request.add(new IndexRequest("posts", "_doc", "3").source(XContentType.JSON, "user", "tlrx"));
|
||||
request.add(new IndexRequest("posts", "_doc", "4").source(XContentType.JSON, "user", "cbuescher"));
|
||||
request.add(new IndexRequest("posts").id("1").source(XContentType.JSON, "user", "kimchy"));
|
||||
request.add(new IndexRequest("posts").id("2").source(XContentType.JSON, "user", "javanna"));
|
||||
request.add(new IndexRequest("posts").id("3").source(XContentType.JSON, "user", "tlrx"));
|
||||
request.add(new IndexRequest("posts").id("4").source(XContentType.JSON, "user", "cbuescher"));
|
||||
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
|
||||
assertSame(RestStatus.OK, bulkResponse.status());
|
||||
|
@ -424,13 +424,13 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
BulkRequest request = new BulkRequest();
|
||||
request.add(new IndexRequest("posts", "_doc", "1")
|
||||
request.add(new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "title", "In which order are my Elasticsearch queries executed?", "user",
|
||||
Arrays.asList("kimchy", "luca"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
request.add(new IndexRequest("posts", "_doc", "2")
|
||||
request.add(new IndexRequest("posts").id("2")
|
||||
.source(XContentType.JSON, "title", "Current status and upcoming changes in Elasticsearch", "user",
|
||||
Arrays.asList("kimchy", "christoph"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
request.add(new IndexRequest("posts", "_doc", "3")
|
||||
request.add(new IndexRequest("posts").id("3")
|
||||
.source(XContentType.JSON, "title", "The Future of Federated Search in Elasticsearch", "user",
|
||||
Arrays.asList("kimchy", "tanguy"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
|
@ -487,7 +487,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
public void testSearchRequestProfiling() throws IOException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
IndexRequest request = new IndexRequest("posts", "_doc", "1")
|
||||
IndexRequest request = new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "tags", "elasticsearch", "comments", 123);
|
||||
request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
|
||||
IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT);
|
||||
|
@ -559,11 +559,11 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
RestHighLevelClient client = highLevelClient();
|
||||
{
|
||||
BulkRequest request = new BulkRequest();
|
||||
request.add(new IndexRequest("posts", "_doc", "1")
|
||||
request.add(new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "title", "In which order are my Elasticsearch queries executed?"));
|
||||
request.add(new IndexRequest("posts", "_doc", "2")
|
||||
request.add(new IndexRequest("posts").id("2")
|
||||
.source(XContentType.JSON, "title", "Current status and upcoming changes in Elasticsearch"));
|
||||
request.add(new IndexRequest("posts", "_doc", "3")
|
||||
request.add(new IndexRequest("posts").id("3")
|
||||
.source(XContentType.JSON, "title", "The Future of Federated Search in Elasticsearch"));
|
||||
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
||||
BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
|
||||
|
@ -1259,19 +1259,19 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertTrue(reviewersResponse.isAcknowledged());
|
||||
|
||||
BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.add(new IndexRequest("posts", "_doc", "1")
|
||||
bulkRequest.add(new IndexRequest("posts").id("1")
|
||||
.source(XContentType.JSON, "title", "In which order are my Elasticsearch queries executed?", "user",
|
||||
Arrays.asList("kimchy", "luca"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
bulkRequest.add(new IndexRequest("posts", "_doc", "2")
|
||||
bulkRequest.add(new IndexRequest("posts").id("2")
|
||||
.source(XContentType.JSON, "title", "Current status and upcoming changes in Elasticsearch", "user",
|
||||
Arrays.asList("kimchy", "christoph"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
bulkRequest.add(new IndexRequest("posts", "_doc", "3")
|
||||
bulkRequest.add(new IndexRequest("posts").id("3")
|
||||
.source(XContentType.JSON, "title", "The Future of Federated Search in Elasticsearch", "user",
|
||||
Arrays.asList("kimchy", "tanguy"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
|
||||
bulkRequest.add(new IndexRequest("authors", "_doc", "1")
|
||||
bulkRequest.add(new IndexRequest("authors").id("1")
|
||||
.source(XContentType.JSON, "user", "kimchy"));
|
||||
bulkRequest.add(new IndexRequest("contributors", "_doc", "1")
|
||||
bulkRequest.add(new IndexRequest("contributors").id("1")
|
||||
.source(XContentType.JSON, "user", "tanguy"));
|
||||
|
||||
|
||||
|
@ -1373,17 +1373,17 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertTrue(authorsResponse.isAcknowledged());
|
||||
|
||||
BulkRequest bulkRequest = new BulkRequest();
|
||||
bulkRequest.add(new IndexRequest("blog", "_doc", "1")
|
||||
bulkRequest.add(new IndexRequest("blog").id("1")
|
||||
.source(XContentType.JSON, "title", "Doubling Down on Open?", "user",
|
||||
Collections.singletonList("kimchy"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
bulkRequest.add(new IndexRequest("blog", "_doc", "2")
|
||||
bulkRequest.add(new IndexRequest("blog").id("2")
|
||||
.source(XContentType.JSON, "title", "Swiftype Joins Forces with Elastic", "user",
|
||||
Arrays.asList("kimchy", "matt"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
bulkRequest.add(new IndexRequest("blog", "_doc", "3")
|
||||
bulkRequest.add(new IndexRequest("blog").id("3")
|
||||
.source(XContentType.JSON, "title", "On Net Neutrality", "user",
|
||||
Arrays.asList("tyler", "kimchy"), "innerObject", Collections.singletonMap("key", "value")));
|
||||
|
||||
bulkRequest.add(new IndexRequest("author", "_doc", "1")
|
||||
bulkRequest.add(new IndexRequest("author").id("1")
|
||||
.source(XContentType.JSON, "user", "kimchy"));
|
||||
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.client.documentation;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
|
@ -1324,19 +1325,52 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
String accessToken;
|
||||
String refreshToken;
|
||||
{
|
||||
// Setup user
|
||||
// Setup users
|
||||
final char[] password = "password".toCharArray();
|
||||
User invalidate_token_user = new User("invalidate_token", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putUserRequest = new PutUserRequest(invalidate_token_user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
User user = new User("user", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putUserRequest = new PutUserRequest(user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putUserResponse.isCreated());
|
||||
|
||||
User this_user = new User("this_user", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putThisUserRequest = new PutUserRequest(this_user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putThisUserResponse = client.security().putUser(putThisUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putThisUserResponse.isCreated());
|
||||
|
||||
User that_user = new User("that_user", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putThatUserRequest = new PutUserRequest(that_user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putThatUserResponse = client.security().putUser(putThatUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putThatUserResponse.isCreated());
|
||||
|
||||
User other_user = new User("other_user", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putOtherUserRequest = new PutUserRequest(other_user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putOtherUserResponse = client.security().putUser(putOtherUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putOtherUserResponse.isCreated());
|
||||
|
||||
User extra_user = new User("extra_user", Collections.singletonList("kibana_user"));
|
||||
PutUserRequest putExtraUserRequest = new PutUserRequest(extra_user, password, true, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putExtraUserResponse = client.security().putUser(putExtraUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putExtraUserResponse.isCreated());
|
||||
|
||||
// Create tokens
|
||||
final CreateTokenRequest createTokenRequest = CreateTokenRequest.passwordGrant("invalidate_token", password);
|
||||
final CreateTokenRequest createTokenRequest = CreateTokenRequest.passwordGrant("user", password);
|
||||
final CreateTokenResponse tokenResponse = client.security().createToken(createTokenRequest, RequestOptions.DEFAULT);
|
||||
accessToken = tokenResponse.getAccessToken();
|
||||
refreshToken = tokenResponse.getRefreshToken();
|
||||
final CreateTokenRequest createThisTokenRequest = CreateTokenRequest.passwordGrant("this_user", password);
|
||||
final CreateTokenResponse thisTokenResponse = client.security().createToken(createThisTokenRequest, RequestOptions.DEFAULT);
|
||||
assertNotNull(thisTokenResponse);
|
||||
final CreateTokenRequest createThatTokenRequest = CreateTokenRequest.passwordGrant("that_user", password);
|
||||
final CreateTokenResponse thatTokenResponse = client.security().createToken(createThatTokenRequest, RequestOptions.DEFAULT);
|
||||
assertNotNull(thatTokenResponse);
|
||||
final CreateTokenRequest createOtherTokenRequest = CreateTokenRequest.passwordGrant("other_user", password);
|
||||
final CreateTokenResponse otherTokenResponse = client.security().createToken(createOtherTokenRequest, RequestOptions.DEFAULT);
|
||||
assertNotNull(otherTokenResponse);
|
||||
final CreateTokenRequest createExtraTokenRequest = CreateTokenRequest.passwordGrant("extra_user", password);
|
||||
final CreateTokenResponse extraTokenResponse = client.security().createToken(createExtraTokenRequest, RequestOptions.DEFAULT);
|
||||
assertNotNull(extraTokenResponse);
|
||||
}
|
||||
|
||||
{
|
||||
// tag::invalidate-access-token-request
|
||||
InvalidateTokenRequest invalidateTokenRequest = InvalidateTokenRequest.accessToken(accessToken);
|
||||
|
@ -1348,15 +1382,54 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::invalidate-token-execute
|
||||
|
||||
// tag::invalidate-token-response
|
||||
boolean isCreated = invalidateTokenResponse.isCreated();
|
||||
final List<ElasticsearchException> errors = invalidateTokenResponse.getErrors();
|
||||
final int invalidatedTokens = invalidateTokenResponse.getInvalidatedTokens();
|
||||
final int previouslyInvalidatedTokens = invalidateTokenResponse.getPreviouslyInvalidatedTokens();
|
||||
// end::invalidate-token-response
|
||||
assertTrue(isCreated);
|
||||
assertTrue(errors.isEmpty());
|
||||
assertThat(invalidatedTokens, equalTo(1));
|
||||
assertThat(previouslyInvalidatedTokens, equalTo(0));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::invalidate-refresh-token-request
|
||||
InvalidateTokenRequest invalidateTokenRequest = InvalidateTokenRequest.refreshToken(refreshToken);
|
||||
// end::invalidate-refresh-token-request
|
||||
InvalidateTokenResponse invalidateTokenResponse =
|
||||
client.security().invalidateToken(invalidateTokenRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(invalidateTokenResponse.getErrors().isEmpty());
|
||||
assertThat(invalidateTokenResponse.getInvalidatedTokens(), equalTo(1));
|
||||
assertThat(invalidateTokenResponse.getPreviouslyInvalidatedTokens(), equalTo(0));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::invalidate-user-tokens-request
|
||||
InvalidateTokenRequest invalidateTokenRequest = InvalidateTokenRequest.userTokens("other_user");
|
||||
// end::invalidate-user-tokens-request
|
||||
InvalidateTokenResponse invalidateTokenResponse =
|
||||
client.security().invalidateToken(invalidateTokenRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(invalidateTokenResponse.getErrors().isEmpty());
|
||||
// We have one refresh and one access token for that user
|
||||
assertThat(invalidateTokenResponse.getInvalidatedTokens(), equalTo(2));
|
||||
assertThat(invalidateTokenResponse.getPreviouslyInvalidatedTokens(), equalTo(0));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::invalidate-user-realm-tokens-request
|
||||
InvalidateTokenRequest invalidateTokenRequest = new InvalidateTokenRequest(null, null, "default_native", "extra_user");
|
||||
// end::invalidate-user-realm-tokens-request
|
||||
InvalidateTokenResponse invalidateTokenResponse =
|
||||
client.security().invalidateToken(invalidateTokenRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(invalidateTokenResponse.getErrors().isEmpty());
|
||||
// We have one refresh and one access token for that user in this realm
|
||||
assertThat(invalidateTokenResponse.getInvalidatedTokens(), equalTo(2));
|
||||
assertThat(invalidateTokenResponse.getPreviouslyInvalidatedTokens(), equalTo(0));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::invalidate-realm-tokens-request
|
||||
InvalidateTokenRequest invalidateTokenRequest = InvalidateTokenRequest.realmTokens("default_native");
|
||||
// end::invalidate-realm-tokens-request
|
||||
|
||||
ActionListener<InvalidateTokenResponse> listener;
|
||||
//tag::invalidate-token-execute-listener
|
||||
|
@ -1386,8 +1459,10 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
final InvalidateTokenResponse response = future.get(30, TimeUnit.SECONDS);
|
||||
assertNotNull(response);
|
||||
assertTrue(response.isCreated());// technically, this should be false, but the API is broken
|
||||
// See https://github.com/elastic/elasticsearch/issues/35115
|
||||
assertTrue(response.getErrors().isEmpty());
|
||||
//We still have 4 tokens ( 2 access_tokens and 2 refresh_tokens ) for the default_native realm
|
||||
assertThat(response.getInvalidatedTokens(), equalTo(4));
|
||||
assertThat(response.getPreviouslyInvalidatedTokens(), equalTo(0));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -49,17 +49,66 @@ public class InvalidateTokenRequestTests extends ESTestCase {
|
|||
));
|
||||
}
|
||||
|
||||
public void testInvalidateRealmTokens() {
|
||||
String realmName = "native";
|
||||
final InvalidateTokenRequest request = InvalidateTokenRequest.realmTokens(realmName);
|
||||
assertThat(request.getAccessToken(), nullValue());
|
||||
assertThat(request.getRefreshToken(), nullValue());
|
||||
assertThat(request.getRealmName(), equalTo(realmName));
|
||||
assertThat(request.getUsername(), nullValue());
|
||||
assertThat(Strings.toString(request), equalTo("{" +
|
||||
"\"realm_name\":\"native\"" +
|
||||
"}"
|
||||
));
|
||||
}
|
||||
|
||||
public void testInvalidateUserTokens() {
|
||||
String username = "user";
|
||||
final InvalidateTokenRequest request = InvalidateTokenRequest.userTokens(username);
|
||||
assertThat(request.getAccessToken(), nullValue());
|
||||
assertThat(request.getRefreshToken(), nullValue());
|
||||
assertThat(request.getRealmName(), nullValue());
|
||||
assertThat(request.getUsername(), equalTo(username));
|
||||
assertThat(Strings.toString(request), equalTo("{" +
|
||||
"\"username\":\"user\"" +
|
||||
"}"
|
||||
));
|
||||
}
|
||||
|
||||
public void testInvalidateUserTokensInRealm() {
|
||||
String username = "user";
|
||||
String realmName = "native";
|
||||
final InvalidateTokenRequest request = new InvalidateTokenRequest(null, null, realmName, username);
|
||||
assertThat(request.getAccessToken(), nullValue());
|
||||
assertThat(request.getRefreshToken(), nullValue());
|
||||
assertThat(request.getRealmName(), equalTo(realmName));
|
||||
assertThat(request.getUsername(), equalTo(username));
|
||||
assertThat(Strings.toString(request), equalTo("{" +
|
||||
"\"realm_name\":\"native\"," +
|
||||
"\"username\":\"user\"" +
|
||||
|
||||
"}"
|
||||
));
|
||||
}
|
||||
|
||||
public void testEqualsAndHashCode() {
|
||||
final String token = randomAlphaOfLength(8);
|
||||
final boolean accessToken = randomBoolean();
|
||||
final InvalidateTokenRequest request = accessToken ? InvalidateTokenRequest.accessToken(token)
|
||||
: InvalidateTokenRequest.refreshToken(token);
|
||||
final EqualsHashCodeTestUtils.MutateFunction<InvalidateTokenRequest> mutate = r -> {
|
||||
if (randomBoolean()) {
|
||||
return accessToken ? InvalidateTokenRequest.refreshToken(token) : InvalidateTokenRequest.accessToken(token);
|
||||
} else {
|
||||
return accessToken ? InvalidateTokenRequest.accessToken(randomAlphaOfLength(10))
|
||||
: InvalidateTokenRequest.refreshToken(randomAlphaOfLength(10));
|
||||
int randomCase = randomIntBetween(1, 4);
|
||||
switch (randomCase) {
|
||||
case 1:
|
||||
return InvalidateTokenRequest.refreshToken(randomAlphaOfLength(5));
|
||||
case 2:
|
||||
return InvalidateTokenRequest.accessToken(randomAlphaOfLength(5));
|
||||
case 3:
|
||||
return InvalidateTokenRequest.realmTokens(randomAlphaOfLength(5));
|
||||
case 4:
|
||||
return InvalidateTokenRequest.userTokens(randomAlphaOfLength(5));
|
||||
default:
|
||||
return new InvalidateTokenRequest(null, null, randomAlphaOfLength(5), randomAlphaOfLength(5));
|
||||
}
|
||||
};
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(request,
|
||||
|
|
|
@ -18,7 +18,9 @@
|
|||
*/
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -28,23 +30,66 @@ import org.hamcrest.Matchers;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class InvalidateTokenResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
final boolean created = randomBoolean();
|
||||
|
||||
final XContentType xContentType = randomFrom(XContentType.values());
|
||||
final XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
|
||||
final int invalidatedTokens = randomInt(32);
|
||||
final int previouslyInvalidatedTokens = randomInt(32);
|
||||
builder.startObject()
|
||||
.field("created", created)
|
||||
.field("created", false)
|
||||
.field("invalidated_tokens", invalidatedTokens)
|
||||
.field("previously_invalidated_tokens", previouslyInvalidatedTokens)
|
||||
.field("error_count", 0)
|
||||
.endObject();
|
||||
BytesReference xContent = BytesReference.bytes(builder);
|
||||
|
||||
try (XContentParser parser = createParser(xContentType.xContent(), xContent)) {
|
||||
final InvalidateTokenResponse response = InvalidateTokenResponse.fromXContent(parser);
|
||||
assertThat(response.isCreated(), Matchers.equalTo(created));
|
||||
assertThat(response.isCreated(), Matchers.equalTo(false));
|
||||
assertThat(response.getInvalidatedTokens(), Matchers.equalTo(invalidatedTokens));
|
||||
assertThat(response.getPreviouslyInvalidatedTokens(), Matchers.equalTo(previouslyInvalidatedTokens));
|
||||
assertThat(response.getErrorsCount(), Matchers.equalTo(0));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testFromXContentWithErrors() throws IOException {
|
||||
|
||||
final XContentType xContentType = randomFrom(XContentType.values());
|
||||
final XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
|
||||
final int invalidatedTokens = randomInt(32);
|
||||
final int previouslyInvalidatedTokens = randomInt(32);
|
||||
builder.startObject()
|
||||
.field("created", false)
|
||||
.field("invalidated_tokens", invalidatedTokens)
|
||||
.field("previously_invalidated_tokens", previouslyInvalidatedTokens)
|
||||
.field("error_count", 0)
|
||||
.startArray("error_details")
|
||||
.startObject();
|
||||
ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS, new ElasticsearchException("foo",
|
||||
new IllegalArgumentException("bar")));
|
||||
builder.endObject().startObject();
|
||||
ElasticsearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS, new ElasticsearchException("boo",
|
||||
new IllegalArgumentException("far")));
|
||||
builder.endObject()
|
||||
.endArray()
|
||||
.endObject();
|
||||
BytesReference xContent = BytesReference.bytes(builder);
|
||||
|
||||
try (XContentParser parser = createParser(xContentType.xContent(), xContent)) {
|
||||
final InvalidateTokenResponse response = InvalidateTokenResponse.fromXContent(parser);
|
||||
assertThat(response.isCreated(), Matchers.equalTo(false));
|
||||
assertThat(response.getInvalidatedTokens(), Matchers.equalTo(invalidatedTokens));
|
||||
assertThat(response.getPreviouslyInvalidatedTokens(), Matchers.equalTo(previouslyInvalidatedTokens));
|
||||
assertThat(response.getErrorsCount(), Matchers.equalTo(2));
|
||||
assertThat(response.getErrors().get(0).toString(), containsString("type=exception, reason=foo"));
|
||||
assertThat(response.getErrors().get(0).toString(), containsString("type=illegal_argument_exception, reason=bar"));
|
||||
assertThat(response.getErrors().get(1).toString(), containsString("type=exception, reason=boo"));
|
||||
assertThat(response.getErrors().get(1).toString(), containsString("type=illegal_argument_exception, reason=far"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -80,12 +80,12 @@ public class NodeRestUsageIT extends ESRestTestCase {
|
|||
// Do some requests to get some rest usage stats
|
||||
client().performRequest(new Request("PUT", "/test"));
|
||||
for (int i = 0; i < 3; i++) {
|
||||
final Request index = new Request("POST", "/test/doc/1");
|
||||
final Request index = new Request("POST", "/test/_doc/1");
|
||||
index.setJsonEntity("{\"foo\": \"bar\"}");
|
||||
client().performRequest(index);
|
||||
}
|
||||
client().performRequest(new Request("GET", "/test/_search"));
|
||||
final Request index4 = new Request("POST", "/test/doc/4");
|
||||
final Request index4 = new Request("POST", "/test/_doc/4");
|
||||
index4.setJsonEntity("{\"foo\": \"bar\"}");
|
||||
client().performRequest(index4);
|
||||
client().performRequest(new Request("POST", "/test/_refresh"));
|
||||
|
|
|
@ -17,9 +17,8 @@ An +{request}+ requires the following arguments:
|
|||
include-tagged::{doc-tests-file}[{api}-request-string]
|
||||
--------------------------------------------------
|
||||
<1> Index
|
||||
<2> Type
|
||||
<3> Document id
|
||||
<4> Document source provided as a `String`
|
||||
<2> Document id for the request
|
||||
<3> Document source provided as a `String`
|
||||
|
||||
==== Providing the document source
|
||||
The document source can be provided in different ways in addition to the
|
||||
|
@ -124,7 +123,7 @@ include-tagged::{doc-tests-file}[{api}-conflict]
|
|||
<1> The raised exception indicates that a version conflict error was returned
|
||||
|
||||
Same will happen in case `opType` was set to `create` and a document with
|
||||
same index, type and id already existed:
|
||||
same index and id already existed:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -124,7 +124,7 @@ When request constructors are used, like in the following example:
|
|||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MigrationDocumentationIT.java[migration-request-ctor]
|
||||
--------------------------------------------------
|
||||
<1> Create an `IndexRequest` using its constructor
|
||||
<1> Create an `IndexRequest` using its constructor and id() setter.
|
||||
|
||||
The migration is very simple. The execution using the `TransportClient`:
|
||||
|
||||
|
|
|
@ -9,29 +9,63 @@
|
|||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Invalidate Token Request
|
||||
The +{request}+ supports invalidating either an _access token_ or a _refresh token_
|
||||
The +{request}+ supports invalidating
|
||||
|
||||
===== Access Token
|
||||
. A specific token, that can be either an _access token_ or a _refresh token_
|
||||
|
||||
. All tokens (both _access tokens_ and _refresh tokens_) for a specific realm
|
||||
|
||||
. All tokens (both _access tokens_ and _refresh tokens_) for a specific user
|
||||
|
||||
. All tokens (both _access tokens_ and _refresh tokens_) for a specific user in a specific realm
|
||||
|
||||
===== Specific access token
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[invalidate-access-token-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== Refresh Token
|
||||
===== Specific refresh token
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[invalidate-refresh-token-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== All tokens for realm
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[invalidate-realm-tokens-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== All tokens for user
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[invalidate-user-tokens-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== All tokens for user in realm
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[invalidate-user-realm-tokens-request]
|
||||
--------------------------------------------------
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Invalidate Token Response
|
||||
|
||||
The returned +{response}+ contains a single property:
|
||||
The returned +{response}+ contains the information regarding the tokens that the request
|
||||
invalidated.
|
||||
|
||||
`created`:: Whether the invalidation record was newly created (`true`),
|
||||
or if the token had already been invalidated (`false`).
|
||||
`invalidatedTokens`:: Available using `getInvalidatedTokens` denotes the number of tokens
|
||||
that this request invalidated.
|
||||
|
||||
`previouslyInvalidatedTokens`:: Available using `getPreviouslyInvalidatedTokens` denotes
|
||||
the number of tokens that this request attempted to invalidate
|
||||
but were already invalid.
|
||||
|
||||
`errors`:: Available using `getErrors` contains possible errors that were encountered while
|
||||
attempting to invalidate specific tokens.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -71,6 +71,15 @@ Advance settings include:
|
|||
to a file configured with protected words (one on each line).
|
||||
Automatically resolves to `config/` based location if exists.
|
||||
|
||||
`adjust_offsets`::
|
||||
By default, the filter tries to output subtokens with adjusted offsets
|
||||
to reflect their actual position in the token stream. However, when
|
||||
used in combination with other filters that alter the length or starting
|
||||
position of tokens without changing their offsets
|
||||
(e.g. <<analysis-trim-tokenfilter,`trim`>>) this can cause tokens with
|
||||
illegal offsets to be emitted. Setting `adjust_offsets` to false will
|
||||
stop `word_delimiter_graph` from adjusting these internal offsets.
|
||||
|
||||
`type_table`::
|
||||
A custom type mapping table, for example (when configured
|
||||
using `type_table_path`):
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
|
||||
IMPORTANT: See <<removal-of-types>>.
|
||||
|
||||
The index API adds or updates a typed JSON document in a specific index,
|
||||
The index API adds or updates a JSON document in a specific index,
|
||||
making it searchable. The following example inserts the JSON document
|
||||
into the "twitter" index, under a type called `_doc` with an id of 1:
|
||||
into the "twitter" index with an id of 1:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -60,13 +60,13 @@ The index operation automatically creates an index if it has not been
|
|||
created before (check out the
|
||||
<<indices-create-index,create index API>> for manually
|
||||
creating an index), and also automatically creates a
|
||||
dynamic type mapping for the specific type if one has not yet been
|
||||
dynamic mapping if one has not yet been
|
||||
created (check out the <<indices-put-mapping,put mapping>>
|
||||
API for manually creating a type mapping).
|
||||
API for manually creating a mapping).
|
||||
|
||||
The mapping itself is very flexible and is schema-free. New fields and
|
||||
objects will automatically be added to the mapping definition of the
|
||||
type specified. Check out the <<mapping,mapping>>
|
||||
objects will automatically be added to the mapping definition.
|
||||
Check out the <<mapping,mapping>>
|
||||
section for more information on mapping definitions.
|
||||
|
||||
Automatic index creation can be disabled by setting
|
||||
|
|
|
@ -55,6 +55,7 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
|
|||
private final byte[] charTypeTable;
|
||||
private final int flags;
|
||||
private final CharArraySet protoWords;
|
||||
private final boolean adjustOffsets;
|
||||
|
||||
public WordDelimiterGraphTokenFilterFactory(IndexSettings indexSettings, Environment env,
|
||||
String name, Settings settings) {
|
||||
|
@ -95,11 +96,12 @@ public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFac
|
|||
Set<?> protectedWords = Analysis.getWordSet(env, settings, "protected_words");
|
||||
this.protoWords = protectedWords == null ? null : CharArraySet.copy(protectedWords);
|
||||
this.flags = flags;
|
||||
this.adjustOffsets = settings.getAsBoolean("adjust_offsets", true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenStream create(TokenStream tokenStream) {
|
||||
return new WordDelimiterGraphFilter(tokenStream, true, charTypeTable, flags, protoWords);
|
||||
return new WordDelimiterGraphFilter(tokenStream, adjustOffsets, charTypeTable, flags, protoWords);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -76,10 +76,35 @@ public class WordDelimiterGraphTokenFilterFactoryTests
|
|||
String source = "PowerShot";
|
||||
int[] expectedIncr = new int[]{1, 0, 1};
|
||||
int[] expectedPosLen = new int[]{2, 1, 1};
|
||||
int[] expectedStartOffsets = new int[]{0, 0, 5};
|
||||
int[] expectedEndOffsets = new int[]{9, 5, 9};
|
||||
String[] expected = new String[]{"PowerShot", "Power", "Shot" };
|
||||
Tokenizer tokenizer = new WhitespaceTokenizer();
|
||||
tokenizer.setReader(new StringReader(source));
|
||||
assertTokenStreamContents(tokenFilter.create(tokenizer), expected, null, null, null,
|
||||
assertTokenStreamContents(tokenFilter.create(tokenizer), expected, expectedStartOffsets, expectedEndOffsets, null,
|
||||
expectedIncr, expectedPosLen, null);
|
||||
}
|
||||
|
||||
public void testAdjustingOffsets() throws IOException {
|
||||
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
|
||||
Settings.builder()
|
||||
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
|
||||
.put("index.analysis.filter.my_word_delimiter.type", type)
|
||||
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
|
||||
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
|
||||
.put("index.analysis.filter.my_word_delimiter.adjust_offsets", "false")
|
||||
.build(),
|
||||
new CommonAnalysisPlugin());
|
||||
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
|
||||
String source = "PowerShot";
|
||||
int[] expectedIncr = new int[]{1, 0, 1};
|
||||
int[] expectedPosLen = new int[]{2, 1, 1};
|
||||
int[] expectedStartOffsets = new int[]{0, 0, 0};
|
||||
int[] expectedEndOffsets = new int[]{9, 9, 9};
|
||||
String[] expected = new String[]{"PowerShot", "Power", "Shot" };
|
||||
Tokenizer tokenizer = new WhitespaceTokenizer();
|
||||
tokenizer.setReader(new StringReader(source));
|
||||
assertTokenStreamContents(tokenFilter.create(tokenizer), expected, expectedStartOffsets, expectedEndOffsets, null,
|
||||
expectedIncr, expectedPosLen, null);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -157,6 +157,26 @@
|
|||
- match: { tokens.2.token: brown }
|
||||
- match: { tokens.3.token: fox }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: the qu1ck brown fox
|
||||
tokenizer: standard
|
||||
filter:
|
||||
- type: word_delimiter_graph
|
||||
adjust_offsets: false
|
||||
- length: { tokens: 6 }
|
||||
- match: { tokens.0.token: the }
|
||||
- match: { tokens.1.token: qu }
|
||||
- match: { tokens.1.start_offset: 4 }
|
||||
- match: { tokens.1.end_offset: 9 }
|
||||
- match: { tokens.2.token: "1" }
|
||||
- match: { tokens.2.start_offset: 4 }
|
||||
- match: { tokens.2.end_offset: 9 }
|
||||
- match: { tokens.3.token: ck }
|
||||
- match: { tokens.3.start_offset: 4 }
|
||||
- match: { tokens.3.end_offset: 9 }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
|
|
|
@ -46,7 +46,7 @@ public class ReindexFailureTests extends ReindexTestCase {
|
|||
* conflict on every request.
|
||||
*/
|
||||
indexRandom(true,
|
||||
client().prepareIndex("dest", "test", "test").setSource("test", 10) /* Its a string in the source! */);
|
||||
client().prepareIndex("dest", "_doc", "test").setSource("test", 10) /* Its a string in the source! */);
|
||||
|
||||
indexDocs(100);
|
||||
|
||||
|
@ -70,7 +70,7 @@ public class ReindexFailureTests extends ReindexTestCase {
|
|||
public void testAbortOnVersionConflict() throws Exception {
|
||||
// Just put something in the way of the copy.
|
||||
indexRandom(true,
|
||||
client().prepareIndex("dest", "test", "1").setSource("test", "test"));
|
||||
client().prepareIndex("dest", "_doc", "1").setSource("test", "test"));
|
||||
|
||||
indexDocs(100);
|
||||
|
||||
|
@ -81,7 +81,7 @@ public class ReindexFailureTests extends ReindexTestCase {
|
|||
BulkByScrollResponse response = copy.get();
|
||||
assertThat(response, matcher().batches(1).versionConflicts(1).failures(1).created(99));
|
||||
for (Failure failure: response.getBulkFailures()) {
|
||||
assertThat(failure.getMessage(), containsString("VersionConflictEngineException[[test]["));
|
||||
assertThat(failure.getMessage(), containsString("VersionConflictEngineException[[_doc]["));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -131,7 +131,7 @@ public class ReindexFailureTests extends ReindexTestCase {
|
|||
private void indexDocs(int count) throws Exception {
|
||||
List<IndexRequestBuilder> docs = new ArrayList<>(count);
|
||||
for (int i = 0; i < count; i++) {
|
||||
docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("test", "words words"));
|
||||
docs.add(client().prepareIndex("source", "_doc", Integer.toString(i)).setSource("test", "words words"));
|
||||
}
|
||||
indexRandom(true, docs);
|
||||
}
|
||||
|
|
|
@ -113,18 +113,18 @@ public class ReindexVersioningTests extends ReindexTestCase {
|
|||
}
|
||||
|
||||
private void setupSourceAbsent() throws Exception {
|
||||
indexRandom(true, client().prepareIndex("source", "test", "test").setVersionType(EXTERNAL)
|
||||
indexRandom(true, client().prepareIndex("source", "_doc", "test").setVersionType(EXTERNAL)
|
||||
.setVersion(SOURCE_VERSION).setSource("foo", "source"));
|
||||
|
||||
assertEquals(SOURCE_VERSION, client().prepareGet("source", "test", "test").get().getVersion());
|
||||
assertEquals(SOURCE_VERSION, client().prepareGet("source", "_doc", "test").get().getVersion());
|
||||
}
|
||||
|
||||
private void setupDest(int version) throws Exception {
|
||||
setupSourceAbsent();
|
||||
indexRandom(true, client().prepareIndex("dest", "test", "test").setVersionType(EXTERNAL)
|
||||
indexRandom(true, client().prepareIndex("dest", "_doc", "test").setVersionType(EXTERNAL)
|
||||
.setVersion(version).setSource("foo", "dest"));
|
||||
|
||||
assertEquals(version, client().prepareGet("dest", "test", "test").get().getVersion());
|
||||
assertEquals(version, client().prepareGet("dest", "_doc", "test").get().getVersion());
|
||||
}
|
||||
|
||||
private void setupDestOlder() throws Exception {
|
||||
|
@ -136,7 +136,7 @@ public class ReindexVersioningTests extends ReindexTestCase {
|
|||
}
|
||||
|
||||
private void assertDest(String fooValue, int version) {
|
||||
GetResponse get = client().prepareGet("dest", "test", "test").get();
|
||||
GetResponse get = client().prepareGet("dest", "_doc", "test").get();
|
||||
assertEquals(fooValue, get.getSource().get("foo"));
|
||||
assertEquals(version, get.getVersion());
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
@ -38,13 +38,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
@ -79,7 +79,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
@ -136,13 +136,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
@ -162,12 +162,11 @@
|
|||
- match: {version_conflicts: 1}
|
||||
- match: {batches: 1}
|
||||
- match: {failures.0.index: dest}
|
||||
- match: {failures.0.type: foo}
|
||||
- match: {failures.0.id: "1"}
|
||||
- match: {failures.0.status: 409}
|
||||
- match: {failures.0.cause.type: version_conflict_engine_exception}
|
||||
# Use a regex so we don't mind if the version isn't always 1. Sometimes it comes out 2.
|
||||
- match: {failures.0.cause.reason: "/\\[foo\\]\\[1\\]:.version.conflict,.document.already.exists.\\(current.version.\\[\\d+\\]\\)/"}
|
||||
- match: {failures.0.cause.reason: "/\\[_doc\\]\\[1\\]:.version.conflict,.document.already.exists.\\(current.version.\\[\\d+\\]\\)/"}
|
||||
- match: {failures.0.cause.shard: /\d+/}
|
||||
- match: {failures.0.cause.index: dest}
|
||||
- gte: { took: 0 }
|
||||
|
@ -184,13 +183,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
@ -225,7 +224,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: {}
|
||||
- do:
|
||||
|
@ -243,7 +242,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
- match: { _source: {} }
|
||||
|
||||
|
@ -288,7 +287,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
- match: { _source.text: "test" }
|
||||
- is_false: _source.filtered
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: src
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "company": "cat" }
|
||||
- do:
|
||||
|
@ -22,7 +22,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
routing: cat
|
||||
- match: { _routing: cat }
|
||||
|
@ -32,7 +32,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: src
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "company": "cat" }
|
||||
routing: null
|
||||
|
@ -52,6 +52,6 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
- is_false: _routing
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: src
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
body: {"text": "test"}
|
||||
- do:
|
||||
|
@ -43,5 +43,5 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: test
|
||||
type: _doc
|
||||
id: 1
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
|
@ -38,13 +38,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "user": "blort" }
|
||||
- do:
|
||||
|
@ -89,13 +89,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "user": "foo" }
|
||||
- do:
|
||||
|
@ -118,7 +118,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: new_twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
routing: kimchy
|
||||
- match: { _routing: kimchy }
|
||||
|
@ -126,7 +126,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: new_twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
routing: foo
|
||||
- match: { _routing: foo }
|
||||
|
@ -136,13 +136,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "user": "foo" }
|
||||
- do:
|
||||
|
@ -192,13 +192,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "user": "foo" }
|
||||
- do:
|
||||
|
@ -227,7 +227,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
version: 1
|
||||
version_type: external
|
||||
|
@ -235,7 +235,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: new_twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
version: 1
|
||||
version_type: external
|
||||
|
@ -273,13 +273,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: new_twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
|
@ -314,13 +314,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "user": "another" }
|
||||
- do:
|
||||
|
@ -366,39 +366,39 @@
|
|||
- do:
|
||||
index:
|
||||
index: index1
|
||||
type: type1
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "lang": "en", "id": 123 }
|
||||
- do:
|
||||
index:
|
||||
index: index1
|
||||
type: type1
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "lang": "en", "id": 456 }
|
||||
- do:
|
||||
index:
|
||||
index: index1
|
||||
type: type1
|
||||
type: _doc
|
||||
id: 3
|
||||
body: { "lang": "fr", "id": 789 }
|
||||
# Destination index
|
||||
- do:
|
||||
index:
|
||||
index: index2
|
||||
type: type2
|
||||
type: _doc
|
||||
id: fr_789
|
||||
body: { "lang": "fr", "id": 789 }
|
||||
- do:
|
||||
index:
|
||||
index: index2
|
||||
type: type2
|
||||
type: _doc
|
||||
id: en_123
|
||||
body: { "lang": "en", "id": 123 }
|
||||
- do:
|
||||
indices.refresh: {}
|
||||
|
||||
# Reindex all documents from "index1" into "index2", changing their type
|
||||
# to "type2" and their id to the concatened lang+id fields,
|
||||
# Reindex all documents from "index1" into "index2", changing
|
||||
# their id to the concatenated lang+id fields,
|
||||
# trashing all non-english pre existing ones
|
||||
- do:
|
||||
reindex:
|
||||
|
@ -408,7 +408,6 @@
|
|||
index: index1
|
||||
dest:
|
||||
index: index2
|
||||
type: type2
|
||||
script:
|
||||
lang: painless
|
||||
source: "ctx._id = ctx._source.lang + '_' + ctx._source.id;
|
||||
|
@ -422,25 +421,22 @@
|
|||
mget:
|
||||
body:
|
||||
docs:
|
||||
- { _index: index2, _type: type2, _id: en_123}
|
||||
- { _index: index2, _type: type2, _id: en_456}
|
||||
- { _index: index2, _type: type2, _id: fr_789}
|
||||
- { _index: index2, _type: _doc, _id: en_123}
|
||||
- { _index: index2, _type: _doc, _id: en_456}
|
||||
- { _index: index2, _type: _doc, _id: fr_789}
|
||||
|
||||
- is_true: docs.0.found
|
||||
- match: { docs.0._index: index2 }
|
||||
- match: { docs.0._type: type2 }
|
||||
- match: { docs.0._id: en_123 }
|
||||
- match: { docs.0._version: 2 }
|
||||
|
||||
- is_true: docs.1.found
|
||||
- match: { docs.1._index: index2 }
|
||||
- match: { docs.1._type: type2 }
|
||||
- match: { docs.1._id: en_456 }
|
||||
- match: { docs.1._version: 1 }
|
||||
|
||||
- is_false: docs.2.found
|
||||
- match: { docs.2._index: index2 }
|
||||
- match: { docs.2._type: type2 }
|
||||
- match: { docs.2._id: fr_789 }
|
||||
|
||||
---
|
||||
|
@ -448,7 +444,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: twitter
|
||||
type: tweet
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "user": "kimchy" }
|
||||
- do:
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
|
@ -59,13 +59,13 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "text": "test2" }
|
||||
- do:
|
||||
|
@ -116,7 +116,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
routing: foo
|
||||
|
@ -169,7 +169,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
|
@ -227,14 +227,14 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
|
@ -291,7 +291,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
|
@ -323,7 +323,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
refresh: true
|
||||
|
@ -345,7 +345,7 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test", "filtered": "removed" }
|
||||
refresh: true
|
||||
|
@ -385,7 +385,7 @@
|
|||
- do:
|
||||
get:
|
||||
index: dest
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
- match: { _source.text: "test" }
|
||||
- is_false: _source.filtered
|
||||
|
@ -404,19 +404,19 @@
|
|||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 1
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 2
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
index:
|
||||
index: source
|
||||
type: foo
|
||||
type: _doc
|
||||
id: 3
|
||||
body: { "text": "test" }
|
||||
- do:
|
||||
|
|
|
@ -135,7 +135,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
|
|||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
restHighLevelClient.index(
|
||||
new IndexRequest("index", "doc", String.valueOf(i)).source("field", "value"), RequestOptions.DEFAULT);
|
||||
new IndexRequest("index").id(String.valueOf(i)).source("field", "value"), RequestOptions.DEFAULT);
|
||||
}
|
||||
Response refreshResponse = client().performRequest(new Request("POST", "/index/_refresh"));
|
||||
assertEquals(200, refreshResponse.getStatusLine().getStatusCode());
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.seqno.SeqNoStats;
|
||||
import org.elasticsearch.rest.action.document.RestGetAction;
|
||||
import org.elasticsearch.rest.action.document.RestIndexAction;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.ObjectPath;
|
||||
|
||||
|
@ -48,6 +49,7 @@ public class IndexingIT extends ESRestTestCase {
|
|||
final int id = idStart + i;
|
||||
Request request = new Request("PUT", index + "/doc/" + id);
|
||||
request.setJsonEntity("{\"test\": \"test_" + randomAlphaOfLength(2) + "\"}");
|
||||
request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
|
||||
assertOK(client().performRequest(request));
|
||||
}
|
||||
return numDocs;
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.rest.action.document.RestIndexAction;
|
||||
import org.elasticsearch.test.rest.yaml.ObjectPath;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -89,6 +90,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
final int id = idStart + i;
|
||||
Request indexDoc = new Request("PUT", index + "/test/" + id);
|
||||
indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}");
|
||||
indexDoc.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
|
||||
client().performRequest(indexDoc);
|
||||
}
|
||||
return numDocs;
|
||||
|
|
|
@ -52,7 +52,7 @@ public class HttpCompressionIT extends ESRestTestCase {
|
|||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
assertNull(response.getHeader(HttpHeaders.CONTENT_ENCODING));
|
||||
|
||||
Request request = new Request("POST", "/company/employees/1");
|
||||
Request request = new Request("POST", "/company/_doc/1");
|
||||
request.setJsonEntity(SAMPLE_DOCUMENT);
|
||||
response = client().performRequest(request);
|
||||
assertEquals(201, response.getStatusLine().getStatusCode());
|
||||
|
|
|
@ -43,6 +43,14 @@
|
|||
"type" : "time",
|
||||
"description" : "Explicit operation timeout"
|
||||
},
|
||||
"if_seq_no" : {
|
||||
"type" : "number",
|
||||
"description" : "only perform the delete operation if the last operation that has changed the document has the specified sequence number"
|
||||
},
|
||||
"if_primary_term" : {
|
||||
"type" : "number",
|
||||
"description" : "only perform the delete operation if the last operation that has changed the document has the specified primary term"
|
||||
},
|
||||
"version" : {
|
||||
"type" : "number",
|
||||
"description" : "Explicit version number for concurrency control"
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html",
|
||||
"methods": ["POST", "PUT"],
|
||||
"url": {
|
||||
"path": "/{index}/{type}",
|
||||
"path": "/{index}/_doc",
|
||||
"paths": ["/{index}/{type}", "/{index}/{type}/{id}", "/{index}/_doc/{id}", "/{index}/_doc"],
|
||||
"parts": {
|
||||
"id": {
|
||||
|
@ -57,6 +57,14 @@
|
|||
"options" : ["internal", "external", "external_gte", "force"],
|
||||
"description" : "Specific version type"
|
||||
},
|
||||
"if_seq_no" : {
|
||||
"type" : "number",
|
||||
"description" : "only perform the index operation if the last operation that has changed the document has the specified sequence number"
|
||||
},
|
||||
"if_primary_term" : {
|
||||
"type" : "number",
|
||||
"description" : "only perform the index operation if the last operation that has changed the document has the specified primary term"
|
||||
},
|
||||
"pipeline" : {
|
||||
"type" : "string",
|
||||
"description" : "The pipeline id to preprocess incoming documents with"
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
---
|
||||
"Compare And Swap Sequence Numbers":
|
||||
|
||||
- skip:
|
||||
version: " - 6.99.99"
|
||||
reason: cas ops are introduced in 7.0.0
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
id: 1
|
||||
body: { foo: bar }
|
||||
- match: { _version: 1}
|
||||
- set: { _seq_no: seqno }
|
||||
- set: { _primary_term: primary_term }
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test_1
|
||||
id: 1
|
||||
- match: { _seq_no: $seqno }
|
||||
- match: { _primary_term: $primary_term }
|
||||
|
||||
- do:
|
||||
catch: conflict
|
||||
index:
|
||||
index: test_1
|
||||
id: 1
|
||||
if_seq_no: 10000
|
||||
if_primary_term: $primary_term
|
||||
body: { foo: bar2 }
|
||||
|
||||
- do:
|
||||
catch: conflict
|
||||
index:
|
||||
index: test_1
|
||||
id: 1
|
||||
if_seq_no: $seqno
|
||||
if_primary_term: 1000
|
||||
body: { foo: bar2 }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
id: 1
|
||||
if_seq_no: $seqno
|
||||
if_primary_term: $primary_term
|
||||
body: { foo: bar2 }
|
||||
|
||||
- match: { _version: 2 }
|
|
@ -78,8 +78,8 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
|
|||
private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
|
||||
private static final ParseField PIPELINE = new ParseField("pipeline");
|
||||
private static final ParseField SOURCE = new ParseField("_source");
|
||||
private static final ParseField IF_SEQ_NO_MATCH = new ParseField("if_seq_no_match");
|
||||
private static final ParseField IF_PRIMARY_TERM_MATCH = new ParseField("if_primary_term_match");
|
||||
private static final ParseField IF_SEQ_NO = new ParseField("if_seq_no");
|
||||
private static final ParseField IF_PRIMARY_TERM = new ParseField("if_primary_term");
|
||||
|
||||
/**
|
||||
* Requests that are part of this request. It is only possible to add things that are both {@link ActionRequest}s and
|
||||
|
@ -350,8 +350,8 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
|
|||
String opType = null;
|
||||
long version = Versions.MATCH_ANY;
|
||||
VersionType versionType = VersionType.INTERNAL;
|
||||
long ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
long ifPrimaryTermMatch = 0;
|
||||
long ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
long ifPrimaryTerm = 0;
|
||||
int retryOnConflict = 0;
|
||||
String pipeline = valueOrDefault(defaultPipeline, globalPipeline);
|
||||
|
||||
|
@ -382,10 +382,10 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
|
|||
version = parser.longValue();
|
||||
} else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
versionType = VersionType.fromString(parser.text());
|
||||
} else if (IF_SEQ_NO_MATCH.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
ifSeqNoMatch = parser.longValue();
|
||||
} else if (IF_PRIMARY_TERM_MATCH.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
ifPrimaryTermMatch = parser.longValue();
|
||||
} else if (IF_SEQ_NO.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
ifSeqNo = parser.longValue();
|
||||
} else if (IF_PRIMARY_TERM.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
ifPrimaryTerm = parser.longValue();
|
||||
} else if (RETRY_ON_CONFLICT.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
retryOnConflict = parser.intValue();
|
||||
} else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) {
|
||||
|
@ -414,7 +414,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
|
|||
|
||||
if ("delete".equals(action)) {
|
||||
add(new DeleteRequest(index, type, id).routing(routing)
|
||||
.version(version).versionType(versionType).setIfMatch(ifSeqNoMatch, ifPrimaryTermMatch), payload);
|
||||
.version(version).versionType(versionType).setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm), payload);
|
||||
} else {
|
||||
nextMarker = findNextMarker(marker, from, data, length);
|
||||
if (nextMarker == -1) {
|
||||
|
@ -427,16 +427,17 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
|
|||
if ("index".equals(action)) {
|
||||
if (opType == null) {
|
||||
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
|
||||
.setPipeline(pipeline).ifMatch(ifSeqNoMatch, ifPrimaryTermMatch)
|
||||
.setPipeline(pipeline).setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm)
|
||||
.source(sliceTrimmingCarriageReturn(data, from, nextMarker,xContentType), xContentType), payload);
|
||||
} else {
|
||||
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
|
||||
.create("create".equals(opType)).setPipeline(pipeline).ifMatch(ifSeqNoMatch, ifPrimaryTermMatch)
|
||||
.create("create".equals(opType)).setPipeline(pipeline)
|
||||
.setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm)
|
||||
.source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType), payload);
|
||||
}
|
||||
} else if ("create".equals(action)) {
|
||||
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
|
||||
.create(true).setPipeline(pipeline).ifMatch(ifSeqNoMatch, ifPrimaryTermMatch)
|
||||
.create(true).setPipeline(pipeline).setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm)
|
||||
.source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType), payload);
|
||||
} else if ("update".equals(action)) {
|
||||
UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).retryOnConflict(retryOnConflict)
|
||||
|
|
|
@ -462,7 +462,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
|
|||
executeOnPrimaryWhileHandlingMappingUpdates(context,
|
||||
() ->
|
||||
primary.applyIndexOperationOnPrimary(request.version(), request.versionType(), sourceToParse,
|
||||
request.ifSeqNoMatch(), request.ifPrimaryTermMatch(), request.getAutoGeneratedTimestamp(), request.isRetry()),
|
||||
request.ifSeqNo(), request.ifPrimaryTerm(), request.getAutoGeneratedTimestamp(), request.isRetry()),
|
||||
e -> primary.getFailedIndexResult(e, request.version()),
|
||||
context::markOperationAsExecuted,
|
||||
mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type()));
|
||||
|
@ -474,7 +474,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
|
|||
final IndexShard primary = context.getPrimary();
|
||||
executeOnPrimaryWhileHandlingMappingUpdates(context,
|
||||
() -> primary.applyDeleteOperationOnPrimary(request.version(), request.type(), request.id(), request.versionType(),
|
||||
request.ifSeqNoMatch(), request.ifPrimaryTermMatch()),
|
||||
request.ifSeqNo(), request.ifPrimaryTerm()),
|
||||
e -> primary.getFailedDeleteResult(e, request.version()),
|
||||
context::markOperationAsExecuted,
|
||||
mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type()));
|
||||
|
|
|
@ -58,8 +58,8 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
private String routing;
|
||||
private long version = Versions.MATCH_ANY;
|
||||
private VersionType versionType = VersionType.INTERNAL;
|
||||
private long ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
private long ifPrimaryTermMatch = 0;
|
||||
private long ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
private long ifPrimaryTerm = 0;
|
||||
|
||||
public DeleteRequest() {
|
||||
}
|
||||
|
@ -116,11 +116,20 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
validationException = addValidationError("version type [force] may no longer be used", validationException);
|
||||
}
|
||||
|
||||
if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY
|
||||
)) {
|
||||
validationException = addValidationError("compare and write operations can not use versioning", validationException);
|
||||
}
|
||||
|
||||
if (ifPrimaryTerm == 0 && ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
validationException = addValidationError("ifSeqNo is set, but primary term is [0]", validationException);
|
||||
}
|
||||
if (ifPrimaryTerm != 0 && ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
validationException =
|
||||
addValidationError("ifSeqNo is unassigned, but primary term is [" + ifPrimaryTerm + "]", validationException);
|
||||
}
|
||||
|
||||
return validationException;
|
||||
}
|
||||
|
||||
|
@ -203,29 +212,52 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
return this;
|
||||
}
|
||||
|
||||
public long ifSeqNoMatch() {
|
||||
return ifSeqNoMatch;
|
||||
/**
|
||||
* If set, only perform this delete request if the document was last modification was assigned this sequence number.
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public long ifSeqNo() {
|
||||
return ifSeqNo;
|
||||
}
|
||||
|
||||
public long ifPrimaryTermMatch() {
|
||||
return ifPrimaryTermMatch;
|
||||
/**
|
||||
* If set, only perform this delete request if the document was last modification was assigned this primary term.
|
||||
*
|
||||
* If the document last modification was assigned a different term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public long ifPrimaryTerm() {
|
||||
return ifPrimaryTerm;
|
||||
}
|
||||
|
||||
public DeleteRequest setIfMatch(long seqNo, long term) {
|
||||
if (term == 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("seqNo is set, but primary term is [0]");
|
||||
}
|
||||
if (term != 0 && seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("seqNo is unassigned, but primary term is [" + term + "]");
|
||||
}
|
||||
/**
|
||||
* only perform this delete request if the document was last modification was assigned the given
|
||||
* sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public DeleteRequest setIfSeqNo(long seqNo) {
|
||||
if (seqNo < 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("sequence numbers must be non negative. got [" + seqNo + "].");
|
||||
}
|
||||
ifSeqNo = seqNo;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* only perform this delete request if the document was last modification was assigned the given
|
||||
* primary term. Must be used in combination with {@link #setIfSeqNo(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different primary term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public DeleteRequest setIfPrimaryTerm(long term) {
|
||||
if (term < 0) {
|
||||
throw new IllegalArgumentException("primary term must be non negative. got [" + term + "]");
|
||||
}
|
||||
ifSeqNoMatch = seqNo;
|
||||
ifPrimaryTermMatch = term;
|
||||
ifPrimaryTerm = term;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -251,11 +283,11 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
version = in.readLong();
|
||||
versionType = VersionType.fromValue(in.readByte());
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
ifSeqNoMatch = in.readZLong();
|
||||
ifPrimaryTermMatch = in.readVLong();
|
||||
ifSeqNo = in.readZLong();
|
||||
ifPrimaryTerm = in.readVLong();
|
||||
} else {
|
||||
ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
ifPrimaryTermMatch = 0;
|
||||
ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
ifPrimaryTerm = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -271,10 +303,10 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
|
|||
out.writeLong(version);
|
||||
out.writeByte(versionType.getValue());
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeZLong(ifSeqNoMatch);
|
||||
out.writeVLong(ifPrimaryTermMatch);
|
||||
} else if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) {
|
||||
assert false : "setIfMatch [" + ifSeqNoMatch + "], currentDocTem [" + ifPrimaryTermMatch + "]";
|
||||
out.writeZLong(ifSeqNo);
|
||||
out.writeVLong(ifPrimaryTerm);
|
||||
} else if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTerm != 0) {
|
||||
assert false : "setIfMatch [" + ifSeqNo + "], currentDocTem [" + ifPrimaryTerm + "]";
|
||||
throw new IllegalStateException(
|
||||
"sequence number based compare and write is not supported until all nodes are on version 7.0 or higher. " +
|
||||
"Stream version [" + out.getVersion() + "]");
|
||||
|
|
|
@ -82,11 +82,26 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder<DeleteReques
|
|||
}
|
||||
|
||||
/**
|
||||
* only performs this delete request if the document was last modification was assigned the given
|
||||
* sequence number and primary term
|
||||
* only perform this delete request if the document was last modification was assigned the given
|
||||
* sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public DeleteRequestBuilder setIfMatch(long seqNo, long term) {
|
||||
request.setIfMatch(seqNo, term);
|
||||
public DeleteRequestBuilder setIfSeqNo(long seqNo) {
|
||||
request.setIfSeqNo(seqNo);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* only perform this delete request if the document was last modification was assigned the given
|
||||
* primary term. Must be used in combination with {@link #setIfSeqNo(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public DeleteRequestBuilder setIfPrimaryTerm(long term) {
|
||||
request.setIfPrimaryTerm(term);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
|
@ -91,7 +91,7 @@ public class GetResponse extends ActionResponse implements Iterable<DocumentFiel
|
|||
}
|
||||
|
||||
/**
|
||||
* The sequence number assigned to the last operation to have changed this document, if found.
|
||||
* The sequence number assigned to the last operation that has changed this document, if found.
|
||||
*/
|
||||
public long getSeqNo() {
|
||||
return getResult.getSeqNo();
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -80,7 +81,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
*/
|
||||
static final int MAX_SOURCE_LENGTH_IN_TOSTRING = 2048;
|
||||
|
||||
private String type;
|
||||
private String type = MapperService.SINGLE_MAPPING_NAME;
|
||||
private String id;
|
||||
@Nullable
|
||||
private String routing;
|
||||
|
@ -105,8 +106,8 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
private long autoGeneratedTimestamp = UNSET_AUTO_GENERATED_TIMESTAMP;
|
||||
|
||||
private boolean isRetry = false;
|
||||
private long ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
private long ifPrimaryTermMatch = 0;
|
||||
private long ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
private long ifPrimaryTerm = 0;
|
||||
|
||||
|
||||
public IndexRequest() {
|
||||
|
@ -123,7 +124,9 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
/**
|
||||
* Constructs a new index request against the specific index and type. The
|
||||
* {@link #source(byte[], XContentType)} must be set.
|
||||
* @deprecated Types are in the process of being removed. Use {@link #IndexRequest(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public IndexRequest(String index, String type) {
|
||||
this.index = index;
|
||||
this.type = type;
|
||||
|
@ -135,7 +138,10 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
* @param index The index to index into
|
||||
* @param type The type to index into
|
||||
* @param id The id of document
|
||||
*
|
||||
* @deprecated Types are in the process of being removed. Use {@link #IndexRequest(String)} with {@link #id(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public IndexRequest(String index, String type, String id) {
|
||||
this.index = index;
|
||||
this.type = type;
|
||||
|
@ -168,7 +174,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
return validationException;
|
||||
}
|
||||
|
||||
if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) {
|
||||
if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTerm != 0) {
|
||||
validationException = addValidationError("create operations do not support compare and set. use index instead",
|
||||
validationException);
|
||||
return validationException;
|
||||
|
@ -201,11 +207,18 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
validationException = addValidationError("pipeline cannot be an empty string", validationException);
|
||||
}
|
||||
|
||||
if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY
|
||||
)) {
|
||||
validationException = addValidationError("compare and write operations can not use versioning", validationException);
|
||||
}
|
||||
if (ifPrimaryTerm == 0 && ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
validationException = addValidationError("ifSeqNo is set, but primary term is [0]", validationException);
|
||||
}
|
||||
if (ifPrimaryTerm != 0 && ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
validationException =
|
||||
addValidationError("ifSeqNo is unassigned, but primary term is [" + ifPrimaryTerm + "]", validationException);
|
||||
}
|
||||
|
||||
return validationException;
|
||||
}
|
||||
|
@ -220,7 +233,9 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
|
||||
/**
|
||||
* The type of the indexed document.
|
||||
* @deprecated Types are in the process of being removed.
|
||||
*/
|
||||
@Deprecated
|
||||
@Override
|
||||
public String type() {
|
||||
return type;
|
||||
|
@ -228,7 +243,9 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
|
||||
/**
|
||||
* Sets the type of the indexed document.
|
||||
* @deprecated Types are in the process of being removed.
|
||||
*/
|
||||
@Deprecated
|
||||
@Override
|
||||
public IndexRequest type(String type) {
|
||||
this.type = type;
|
||||
|
@ -486,31 +503,53 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
return this;
|
||||
}
|
||||
|
||||
public IndexRequest ifMatch(long seqNo, long term) {
|
||||
if (term == 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("seqNo is set, but primary term is [0]");
|
||||
}
|
||||
|
||||
if (term != 0 && seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("seqNo is unassigned, but primary term is [" + term + "]");
|
||||
}
|
||||
/**
|
||||
* only perform this indexing request if the document was last modification was assigned the given
|
||||
* sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public IndexRequest setIfSeqNo(long seqNo) {
|
||||
if (seqNo < 0 && seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
|
||||
throw new IllegalArgumentException("sequence numbers must be non negative. got [" + seqNo + "].");
|
||||
}
|
||||
if (term < 0) {
|
||||
throw new IllegalArgumentException("primary term must be non negative. got [" + term + "]");
|
||||
}
|
||||
ifSeqNoMatch = seqNo;
|
||||
ifPrimaryTermMatch = term;
|
||||
ifSeqNo = seqNo;
|
||||
return this;
|
||||
}
|
||||
|
||||
public long ifSeqNoMatch() {
|
||||
return ifSeqNoMatch;
|
||||
/**
|
||||
* only performs this indexing request if the document was last modification was assigned the given
|
||||
* primary term. Must be used in combination with {@link #setIfSeqNo(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public IndexRequest setIfPrimaryTerm(long term) {
|
||||
if (term < 0) {
|
||||
throw new IllegalArgumentException("primary term must be non negative. got [" + term + "]");
|
||||
}
|
||||
ifPrimaryTerm = term;
|
||||
return this;
|
||||
}
|
||||
|
||||
public long ifPrimaryTermMatch() {
|
||||
return ifPrimaryTermMatch;
|
||||
/**
|
||||
* If set, only perform this indexing request if the document was last modification was assigned this sequence number.
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public long ifSeqNo() {
|
||||
return ifSeqNo;
|
||||
}
|
||||
|
||||
/**
|
||||
* If set, only perform this indexing request if the document was last modification was assigned this primary term.
|
||||
*
|
||||
* If the document last modification was assigned a different term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public long ifPrimaryTerm() {
|
||||
return ifPrimaryTerm;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -534,8 +573,8 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
// generate id if not already provided
|
||||
if (id == null) {
|
||||
assert autoGeneratedTimestamp == -1 : "timestamp has already been generated!";
|
||||
assert ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
assert ifPrimaryTermMatch == 0;
|
||||
assert ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
assert ifPrimaryTerm == 0;
|
||||
autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia
|
||||
String uid;
|
||||
if (indexCreatedVersion.onOrAfter(Version.V_6_0_0_beta1)) {
|
||||
|
@ -578,11 +617,11 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
contentType = null;
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
ifSeqNoMatch = in.readZLong();
|
||||
ifPrimaryTermMatch = in.readVLong();
|
||||
ifSeqNo = in.readZLong();
|
||||
ifPrimaryTerm = in.readVLong();
|
||||
} else {
|
||||
ifSeqNoMatch = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
ifPrimaryTermMatch = 0;
|
||||
ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
ifPrimaryTerm = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -616,10 +655,10 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
|
||||
out.writeZLong(ifSeqNoMatch);
|
||||
out.writeVLong(ifPrimaryTermMatch);
|
||||
} else if (ifSeqNoMatch != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTermMatch != 0) {
|
||||
assert false : "setIfMatch [" + ifSeqNoMatch + "], currentDocTem [" + ifPrimaryTermMatch + "]";
|
||||
out.writeZLong(ifSeqNo);
|
||||
out.writeVLong(ifPrimaryTerm);
|
||||
} else if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTerm != 0) {
|
||||
assert false : "setIfMatch [" + ifSeqNo + "], currentDocTem [" + ifPrimaryTerm + "]";
|
||||
throw new IllegalStateException(
|
||||
"sequence number based compare and write is not supported until all nodes are on version 7.0 or higher. " +
|
||||
"Stream version [" + out.getVersion() + "]");
|
||||
|
|
|
@ -200,11 +200,26 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder<IndexRequest,
|
|||
}
|
||||
|
||||
/**
|
||||
* only performs this indexing request if the document was last modification was assigned the given
|
||||
* sequence number and primary term
|
||||
* only perform this indexing request if the document was last modification was assigned the given
|
||||
* sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different sequence number a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public IndexRequestBuilder setIfMatch(long seqNo, long term) {
|
||||
request.ifMatch(seqNo, term);
|
||||
public IndexRequestBuilder setIfSeqNo(long seqNo) {
|
||||
request.setIfSeqNo(seqNo);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* only perform this indexing request if the document was last modification was assigned the given
|
||||
* primary term. Must be used in combination with {@link #setIfSeqNo(long)}
|
||||
*
|
||||
* If the document last modification was assigned a different term a
|
||||
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
|
||||
*/
|
||||
public IndexRequestBuilder setIfPrimaryTerm(long term) {
|
||||
request.setIfPrimaryTerm(term);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
|
@ -1345,23 +1345,23 @@ public abstract class Engine implements Closeable {
|
|||
private final ParsedDocument doc;
|
||||
private final long autoGeneratedIdTimestamp;
|
||||
private final boolean isRetry;
|
||||
private final long ifSeqNoMatch;
|
||||
private final long ifPrimaryTermMatch;
|
||||
private final long ifSeqNo;
|
||||
private final long ifPrimaryTerm;
|
||||
|
||||
public Index(Term uid, ParsedDocument doc, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin,
|
||||
long startTime, long autoGeneratedIdTimestamp, boolean isRetry, long ifSeqNoMatch, long ifPrimaryTermMatch) {
|
||||
long startTime, long autoGeneratedIdTimestamp, boolean isRetry, long ifSeqNo, long ifPrimaryTerm) {
|
||||
super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
|
||||
assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin;
|
||||
assert ifPrimaryTermMatch >= 0 : "ifPrimaryTermMatch [" + ifPrimaryTermMatch + "] must be non negative";
|
||||
assert ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO || ifSeqNoMatch >=0 :
|
||||
"ifSeqNoMatch [" + ifSeqNoMatch + "] must be non negative or unset";
|
||||
assert (origin == Origin.PRIMARY) || (ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO && ifPrimaryTermMatch == 0) :
|
||||
assert ifPrimaryTerm >= 0 : "ifPrimaryTerm [" + ifPrimaryTerm + "] must be non negative";
|
||||
assert ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || ifSeqNo >=0 :
|
||||
"ifSeqNo [" + ifSeqNo + "] must be non negative or unset";
|
||||
assert (origin == Origin.PRIMARY) || (ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && ifPrimaryTerm == 0) :
|
||||
"cas operations are only allowed if origin is primary. get [" + origin + "]";
|
||||
this.doc = doc;
|
||||
this.isRetry = isRetry;
|
||||
this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp;
|
||||
this.ifSeqNoMatch = ifSeqNoMatch;
|
||||
this.ifPrimaryTermMatch = ifPrimaryTermMatch;
|
||||
this.ifSeqNo = ifSeqNo;
|
||||
this.ifPrimaryTerm = ifPrimaryTerm;
|
||||
}
|
||||
|
||||
public Index(Term uid, long primaryTerm, ParsedDocument doc) {
|
||||
|
@ -1426,12 +1426,12 @@ public abstract class Engine implements Closeable {
|
|||
return isRetry;
|
||||
}
|
||||
|
||||
public long getIfSeqNoMatch() {
|
||||
return ifSeqNoMatch;
|
||||
public long getIfSeqNo() {
|
||||
return ifSeqNo;
|
||||
}
|
||||
|
||||
public long getIfPrimaryTermMatch() {
|
||||
return ifPrimaryTermMatch;
|
||||
public long getIfPrimaryTerm() {
|
||||
return ifPrimaryTerm;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1439,22 +1439,22 @@ public abstract class Engine implements Closeable {
|
|||
|
||||
private final String type;
|
||||
private final String id;
|
||||
private final long ifSeqNoMatch;
|
||||
private final long ifPrimaryTermMatch;
|
||||
private final long ifSeqNo;
|
||||
private final long ifPrimaryTerm;
|
||||
|
||||
public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType,
|
||||
Origin origin, long startTime, long ifSeqNoMatch, long ifPrimaryTermMatch) {
|
||||
Origin origin, long startTime, long ifSeqNo, long ifPrimaryTerm) {
|
||||
super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
|
||||
assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin;
|
||||
assert ifPrimaryTermMatch >= 0 : "ifPrimaryTermMatch [" + ifPrimaryTermMatch + "] must be non negative";
|
||||
assert ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO || ifSeqNoMatch >=0 :
|
||||
"ifSeqNoMatch [" + ifSeqNoMatch + "] must be non negative or unset";
|
||||
assert (origin == Origin.PRIMARY) || (ifSeqNoMatch == SequenceNumbers.UNASSIGNED_SEQ_NO && ifPrimaryTermMatch == 0) :
|
||||
assert ifPrimaryTerm >= 0 : "ifPrimaryTerm [" + ifPrimaryTerm + "] must be non negative";
|
||||
assert ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || ifSeqNo >=0 :
|
||||
"ifSeqNo [" + ifSeqNo + "] must be non negative or unset";
|
||||
assert (origin == Origin.PRIMARY) || (ifSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && ifPrimaryTerm == 0) :
|
||||
"cas operations are only allowed if origin is primary. get [" + origin + "]";
|
||||
this.type = Objects.requireNonNull(type);
|
||||
this.id = Objects.requireNonNull(id);
|
||||
this.ifSeqNoMatch = ifSeqNoMatch;
|
||||
this.ifPrimaryTermMatch = ifPrimaryTermMatch;
|
||||
this.ifSeqNo = ifSeqNo;
|
||||
this.ifPrimaryTerm = ifPrimaryTerm;
|
||||
}
|
||||
|
||||
public Delete(String type, String id, Term uid, long primaryTerm) {
|
||||
|
@ -1487,12 +1487,12 @@ public abstract class Engine implements Closeable {
|
|||
return (uid().field().length() + uid().text().length()) * 2 + 20;
|
||||
}
|
||||
|
||||
public long getIfSeqNoMatch() {
|
||||
return ifSeqNoMatch;
|
||||
public long getIfSeqNo() {
|
||||
return ifSeqNo;
|
||||
}
|
||||
|
||||
public long getIfPrimaryTermMatch() {
|
||||
return ifPrimaryTermMatch;
|
||||
public long getIfPrimaryTerm() {
|
||||
return ifPrimaryTerm;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -965,7 +965,7 @@ public class InternalEngine extends Engine {
|
|||
versionMap.enforceSafeAccess();
|
||||
// resolves incoming version
|
||||
final VersionValue versionValue =
|
||||
resolveDocVersion(index, index.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO);
|
||||
resolveDocVersion(index, index.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO);
|
||||
final long currentVersion;
|
||||
final boolean currentNotFoundOrDeleted;
|
||||
if (versionValue == null) {
|
||||
|
@ -975,15 +975,15 @@ public class InternalEngine extends Engine {
|
|||
currentVersion = versionValue.version;
|
||||
currentNotFoundOrDeleted = versionValue.isDelete();
|
||||
}
|
||||
if (index.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO && versionValue == null) {
|
||||
if (index.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && versionValue == null) {
|
||||
final VersionConflictEngineException e = new VersionConflictEngineException(shardId, index.type(), index.id(),
|
||||
index.getIfSeqNoMatch(), index.getIfPrimaryTermMatch(), SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
index.getIfSeqNo(), index.getIfPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
plan = IndexingStrategy.skipDueToVersionConflict(e, currentNotFoundOrDeleted, currentVersion, getPrimaryTerm());
|
||||
} else if (index.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionValue.seqNo != index.getIfSeqNoMatch() || versionValue.term != index.getIfPrimaryTermMatch()
|
||||
} else if (index.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionValue.seqNo != index.getIfSeqNo() || versionValue.term != index.getIfPrimaryTerm()
|
||||
)) {
|
||||
final VersionConflictEngineException e = new VersionConflictEngineException(shardId, index.type(), index.id(),
|
||||
index.getIfSeqNoMatch(), index.getIfPrimaryTermMatch(), versionValue.seqNo, versionValue.term);
|
||||
index.getIfSeqNo(), index.getIfPrimaryTerm(), versionValue.seqNo, versionValue.term);
|
||||
plan = IndexingStrategy.skipDueToVersionConflict(e, currentNotFoundOrDeleted, currentVersion, getPrimaryTerm());
|
||||
} else if (index.versionType().isVersionConflictForWrites(
|
||||
currentVersion, index.version(), currentNotFoundOrDeleted)) {
|
||||
|
@ -1302,7 +1302,7 @@ public class InternalEngine extends Engine {
|
|||
assert delete.origin() == Operation.Origin.PRIMARY : "planing as primary but got " + delete.origin();
|
||||
assert getMaxSeqNoOfUpdatesOrDeletes() != SequenceNumbers.UNASSIGNED_SEQ_NO : "max_seq_no_of_updates is not initialized";
|
||||
// resolve operation from external to internal
|
||||
final VersionValue versionValue = resolveDocVersion(delete, delete.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO);
|
||||
final VersionValue versionValue = resolveDocVersion(delete, delete.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO);
|
||||
assert incrementVersionLookup();
|
||||
final long currentVersion;
|
||||
final boolean currentlyDeleted;
|
||||
|
@ -1314,15 +1314,15 @@ public class InternalEngine extends Engine {
|
|||
currentlyDeleted = versionValue.isDelete();
|
||||
}
|
||||
final DeletionStrategy plan;
|
||||
if (delete.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO && versionValue == null) {
|
||||
if (delete.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && versionValue == null) {
|
||||
final VersionConflictEngineException e = new VersionConflictEngineException(shardId, delete.type(), delete.id(),
|
||||
delete.getIfSeqNoMatch(), delete.getIfPrimaryTermMatch(), SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
delete.getIfSeqNo(), delete.getIfPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
plan = DeletionStrategy.skipDueToVersionConflict(e, currentVersion, getPrimaryTerm(), currentlyDeleted);
|
||||
} else if (delete.getIfSeqNoMatch() != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionValue.seqNo != delete.getIfSeqNoMatch() || versionValue.term != delete.getIfPrimaryTermMatch()
|
||||
} else if (delete.getIfSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && (
|
||||
versionValue.seqNo != delete.getIfSeqNo() || versionValue.term != delete.getIfPrimaryTerm()
|
||||
)) {
|
||||
final VersionConflictEngineException e = new VersionConflictEngineException(shardId, delete.type(), delete.id(),
|
||||
delete.getIfSeqNoMatch(), delete.getIfPrimaryTermMatch(), versionValue.seqNo, versionValue.term);
|
||||
delete.getIfSeqNo(), delete.getIfPrimaryTerm(), versionValue.seqNo, versionValue.term);
|
||||
plan = DeletionStrategy.skipDueToVersionConflict(e, currentVersion, getPrimaryTerm(), currentlyDeleted);
|
||||
} else if (delete.versionType().isVersionConflictForWrites(currentVersion, delete.version(), currentlyDeleted)) {
|
||||
final VersionConflictEngineException e = new VersionConflictEngineException(shardId, delete, currentVersion, currentlyDeleted);
|
||||
|
|
|
@ -131,7 +131,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
}
|
||||
|
||||
/**
|
||||
* The sequence number assigned to the last operation to have changed this document, if found.
|
||||
* The sequence number assigned to the last operation that has changed this document, if found.
|
||||
*/
|
||||
public long getSeqNo() {
|
||||
return seqNo;
|
||||
|
|
|
@ -109,10 +109,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
|
||||
//TODO this needs to be cleaned up: _timestamp and _ttl are not supported anymore, _field_names, _seq_no, _version and _source are
|
||||
//also missing, not sure if on purpose. See IndicesModule#getMetadataMappers
|
||||
private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
|
||||
"_id", "_type", "_routing", "_index",
|
||||
"_size", "_timestamp", "_ttl", IgnoredFieldMapper.NAME
|
||||
);
|
||||
private static final String[] SORTED_META_FIELDS = new String[]{
|
||||
"_id", IgnoredFieldMapper.NAME, "_index", "_routing", "_size", "_timestamp", "_ttl", "_type"
|
||||
};
|
||||
|
||||
private static final ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(SORTED_META_FIELDS);
|
||||
|
||||
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(MapperService.class));
|
||||
|
||||
|
@ -762,7 +763,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
}
|
||||
|
||||
public static String[] getAllMetaFields() {
|
||||
return META_FIELDS.toArray(String.class);
|
||||
return Arrays.copyOf(SORTED_META_FIELDS, SORTED_META_FIELDS.length);
|
||||
}
|
||||
|
||||
/** An analyzer wrapper that can lookup fields within the index mappings */
|
||||
|
@ -789,5 +790,4 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
return defaultAnalyzer;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -686,12 +686,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
|
||||
public Engine.IndexResult applyIndexOperationOnPrimary(long version, VersionType versionType, SourceToParse sourceToParse,
|
||||
long ifSeqNoMatch, long ifPrimaryTermMatch, long autoGeneratedTimestamp,
|
||||
long ifSeqNo, long ifPrimaryTerm, long autoGeneratedTimestamp,
|
||||
boolean isRetry)
|
||||
throws IOException {
|
||||
assert versionType.validateVersionForWrites(version);
|
||||
return applyIndexOperation(getEngine(), UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, versionType, ifSeqNoMatch,
|
||||
ifPrimaryTermMatch, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse);
|
||||
return applyIndexOperation(getEngine(), UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, versionType, ifSeqNo,
|
||||
ifPrimaryTerm, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse);
|
||||
}
|
||||
|
||||
public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, long autoGeneratedTimeStamp,
|
||||
|
@ -702,7 +702,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
|
||||
private Engine.IndexResult applyIndexOperation(Engine engine, long seqNo, long opPrimaryTerm, long version,
|
||||
@Nullable VersionType versionType, long ifSeqNoMatch, long ifPrimaryTermMatch,
|
||||
@Nullable VersionType versionType, long ifSeqNo, long ifPrimaryTerm,
|
||||
long autoGeneratedTimeStamp, boolean isRetry, Engine.Operation.Origin origin,
|
||||
SourceToParse sourceToParse) throws IOException {
|
||||
assert opPrimaryTerm <= this.operationPrimaryTerm: "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm
|
||||
|
@ -712,7 +712,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
try {
|
||||
operation = prepareIndex(docMapper(sourceToParse.type()), indexSettings.getIndexVersionCreated(), sourceToParse, seqNo,
|
||||
opPrimaryTerm, version, versionType, origin, autoGeneratedTimeStamp, isRetry,
|
||||
ifSeqNoMatch, ifPrimaryTermMatch);
|
||||
ifSeqNo, ifPrimaryTerm);
|
||||
Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
|
||||
if (update != null) {
|
||||
return new Engine.IndexResult(update);
|
||||
|
@ -732,7 +732,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
public static Engine.Index prepareIndex(DocumentMapperForType docMapper, Version indexCreatedVersion, SourceToParse source, long seqNo,
|
||||
long primaryTerm, long version, VersionType versionType, Engine.Operation.Origin origin,
|
||||
long autoGeneratedIdTimestamp, boolean isRetry,
|
||||
long ifSeqNoMatch, long ifPrimaryTermMatch) {
|
||||
long ifSeqNo, long ifPrimaryTerm) {
|
||||
long startTime = System.nanoTime();
|
||||
ParsedDocument doc = docMapper.getDocumentMapper().parse(source);
|
||||
if (docMapper.getMapping() != null) {
|
||||
|
@ -740,7 +740,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(doc.id()));
|
||||
return new Engine.Index(uid, doc, seqNo, primaryTerm, version, versionType, origin, startTime, autoGeneratedIdTimestamp, isRetry,
|
||||
ifSeqNoMatch, ifPrimaryTermMatch);
|
||||
ifSeqNo, ifPrimaryTerm);
|
||||
}
|
||||
|
||||
private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOException {
|
||||
|
@ -792,11 +792,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
|
||||
public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType,
|
||||
long ifSeqNoMatch, long ifPrimaryTermMatch)
|
||||
long ifSeqNo, long ifPrimaryTerm)
|
||||
throws IOException {
|
||||
assert versionType.validateVersionForWrites(version);
|
||||
return applyDeleteOperation(getEngine(), UNASSIGNED_SEQ_NO, operationPrimaryTerm, version, type, id, versionType,
|
||||
ifSeqNoMatch, ifPrimaryTermMatch, Engine.Operation.Origin.PRIMARY);
|
||||
ifSeqNo, ifPrimaryTerm, Engine.Operation.Origin.PRIMARY);
|
||||
}
|
||||
|
||||
public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException {
|
||||
|
@ -805,7 +805,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
|
||||
private Engine.DeleteResult applyDeleteOperation(Engine engine, long seqNo, long opPrimaryTerm, long version, String type, String id,
|
||||
@Nullable VersionType versionType, long ifSeqNoMatch, long ifPrimaryTermMatch,
|
||||
@Nullable VersionType versionType, long ifSeqNo, long ifPrimaryTerm,
|
||||
Engine.Operation.Origin origin) throws IOException {
|
||||
assert opPrimaryTerm <= this.operationPrimaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.operationPrimaryTerm
|
||||
+ "]";
|
||||
|
@ -835,16 +835,16 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
|||
}
|
||||
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
|
||||
final Engine.Delete delete = prepareDelete(type, id, uid, seqNo, opPrimaryTerm, version,
|
||||
versionType, origin, ifSeqNoMatch, ifPrimaryTermMatch);
|
||||
versionType, origin, ifSeqNo, ifPrimaryTerm);
|
||||
return delete(engine, delete);
|
||||
}
|
||||
|
||||
private Engine.Delete prepareDelete(String type, String id, Term uid, long seqNo, long primaryTerm, long version,
|
||||
VersionType versionType, Engine.Operation.Origin origin,
|
||||
long ifSeqNoMatch, long ifPrimaryTermMatch) {
|
||||
long ifSeqNo, long ifPrimaryTerm) {
|
||||
long startTime = System.nanoTime();
|
||||
return new Engine.Delete(resolveType(type), id, uid, seqNo, primaryTerm, version, versionType, origin, startTime,
|
||||
ifSeqNoMatch, ifPrimaryTermMatch);
|
||||
ifSeqNo, ifPrimaryTerm);
|
||||
}
|
||||
|
||||
private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException {
|
||||
|
|
|
@ -66,6 +66,8 @@ public class RestDeleteAction extends BaseRestHandler {
|
|||
deleteRequest.setRefreshPolicy(request.param("refresh"));
|
||||
deleteRequest.version(RestActions.parseVersion(request));
|
||||
deleteRequest.versionType(VersionType.fromString(request.param("version_type"), deleteRequest.versionType()));
|
||||
deleteRequest.setIfSeqNo(request.paramAsLong("if_seq_no", deleteRequest.ifSeqNo()));
|
||||
deleteRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", deleteRequest.ifPrimaryTerm()));
|
||||
|
||||
String waitForActiveShards = request.param("wait_for_active_shards");
|
||||
if (waitForActiveShards != null) {
|
||||
|
|
|
@@ -19,9 +19,11 @@

package org.elasticsearch.rest.action.document;

import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.MapperService;

@@ -38,6 +40,11 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT;

public class RestIndexAction extends BaseRestHandler {
    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(
        LogManager.getLogger(RestDeleteAction.class));
    public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " +
        "document index requests is deprecated, use the /{index}/_doc/{id} or /{index}/_doc endpoints instead.";

    public RestIndexAction(Settings settings, RestController controller) {
        super(settings);
        controller.registerHandler(POST, "/{index}/{type}", this); // auto id creation

@@ -79,13 +86,15 @@ public class RestIndexAction extends BaseRestHandler {

    @Override
    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
        final boolean includeTypeName = request.paramAsBoolean("include_type_name", true);
        IndexRequest indexRequest;
        final String type = request.param("type");
        if (includeTypeName == false && MapperService.SINGLE_MAPPING_NAME.equals(type) == false) {
            throw new IllegalArgumentException("You may only use the [include_type_name=false] option with the index APIs with the " +
                "[{index}/_doc/{id}] and [{index}/_doc] endpoints.");
        if (type.equals(MapperService.SINGLE_MAPPING_NAME) == false) {
            deprecationLogger.deprecatedAndMaybeLog("index_with_types", TYPES_DEPRECATION_MESSAGE);
            indexRequest = new IndexRequest(request.param("index"), type, request.param("id"));
        } else {
            indexRequest = new IndexRequest(request.param("index"));
            indexRequest.id(request.param("id"));
        }
        IndexRequest indexRequest = new IndexRequest(request.param("index"), type, request.param("id"));
        indexRequest.routing(request.param("routing"));
        indexRequest.setPipeline(request.param("pipeline"));
        indexRequest.source(request.requiredContent(), request.getXContentType());

@@ -93,6 +102,8 @@ public class RestIndexAction extends BaseRestHandler {
        indexRequest.setRefreshPolicy(request.param("refresh"));
        indexRequest.version(RestActions.parseVersion(request));
        indexRequest.versionType(VersionType.fromString(request.param("version_type"), indexRequest.versionType()));
        indexRequest.setIfSeqNo(request.paramAsLong("if_seq_no", indexRequest.ifSeqNo()));
        indexRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", indexRequest.ifPrimaryTerm()));
        String sOpType = request.param("op_type");
        String waitForActiveShards = request.param("wait_for_active_shards");
        if (waitForActiveShards != null) {
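With types on their way out, the handler now builds typeless requests for the `_doc` endpoints and only falls back to the typed constructor, plus a deprecation warning, when a custom type still appears in the path. A minimal sketch of the typeless style the test updates below adopt; the index name, id and source are illustrative:

    // Sketch: the typeless replacement for new IndexRequest("my-index", "my-type", "1").
    IndexRequest request = new IndexRequest("my-index").id("1");
    request.source("{\"field\":\"value\"}", XContentType.JSON);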
@@ -73,7 +73,7 @@ public class IndexRequestTests extends ESTestCase {
        Set<VersionType> allButInternalSet = new HashSet<>(Arrays.asList(VersionType.values()));
        allButInternalSet.remove(VersionType.INTERNAL);
        VersionType[] allButInternal = allButInternalSet.toArray(new VersionType[]{});
        IndexRequest request = new IndexRequest("index", "type", "1");
        IndexRequest request = new IndexRequest("index").id("1");
        request.opType(IndexRequest.OpType.CREATE);
        request.versionType(randomFrom(allButInternal));
        assertThat(request.validate().validationErrors(), not(empty()));

@@ -85,19 +85,19 @@ public class IndexRequestTests extends ESTestCase {

    public void testIndexingRejectsLongIds() {
        String id = randomAlphaOfLength(511);
        IndexRequest request = new IndexRequest("index", "type", id);
        IndexRequest request = new IndexRequest("index").id(id);
        request.source("{}", XContentType.JSON);
        ActionRequestValidationException validate = request.validate();
        assertNull(validate);

        id = randomAlphaOfLength(512);
        request = new IndexRequest("index", "type", id);
        request = new IndexRequest("index").id(id);
        request.source("{}", XContentType.JSON);
        validate = request.validate();
        assertNull(validate);

        id = randomAlphaOfLength(513);
        request = new IndexRequest("index", "type", id);
        request = new IndexRequest("index").id(id);
        request.source("{}", XContentType.JSON);
        validate = request.validate();
        assertThat(validate, notNullValue());

@@ -106,7 +106,7 @@ public class IndexRequestTests extends ESTestCase {
    }

    public void testWaitForActiveShards() {
        IndexRequest request = new IndexRequest("index", "type");
        IndexRequest request = new IndexRequest("index");
        final int count = randomIntBetween(0, 10);
        request.waitForActiveShards(ActiveShardCount.from(count));
        assertEquals(request.waitForActiveShards(), ActiveShardCount.from(count));

@@ -115,10 +115,10 @@ public class IndexRequestTests extends ESTestCase {
    }

    public void testAutoGenIdTimestampIsSet() {
        IndexRequest request = new IndexRequest("index", "type");
        IndexRequest request = new IndexRequest("index");
        request.process(Version.CURRENT, null, "index");
        assertTrue("expected > 0 but got: " + request.getAutoGeneratedTimestamp(), request.getAutoGeneratedTimestamp() > 0);
        request = new IndexRequest("index", "type", "1");
        request = new IndexRequest("index").id("1");
        request.process(Version.CURRENT, null, "index");
        assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, request.getAutoGeneratedTimestamp());
    }

@@ -156,7 +156,7 @@ public class IndexRequestTests extends ESTestCase {
    }

    public void testIndexRequestXContentSerialization() throws IOException {
        IndexRequest indexRequest = new IndexRequest("foo", "bar", "1");
        IndexRequest indexRequest = new IndexRequest("foo").id("1");
        indexRequest.source("{}", XContentType.JSON);
        assertEquals(XContentType.JSON, indexRequest.getContentType());

@@ -171,7 +171,7 @@ public class IndexRequestTests extends ESTestCase {

    // reindex makes use of index requests without a source so this needs to be handled
    public void testSerializationOfEmptyRequestWorks() throws IOException {
        IndexRequest request = new IndexRequest("index", "type");
        IndexRequest request = new IndexRequest("index");
        assertNull(request.getContentType());
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            request.writeTo(out);

@@ -181,27 +181,26 @@ public class IndexRequestTests extends ESTestCase {
                serialized.readFrom(in);
                assertNull(request.getContentType());
                assertEquals("index", request.index());
                assertEquals("type", request.type());
            }
        }
    }

    public void testToStringSizeLimit() throws UnsupportedEncodingException {
        IndexRequest request = new IndexRequest("index", "type");
        IndexRequest request = new IndexRequest("index");

        String source = "{\"name\":\"value\"}";
        request.source(source, XContentType.JSON);
        assertEquals("index {[index][type][null], source[" + source + "]}", request.toString());
        assertEquals("index {[index][_doc][null], source[" + source + "]}", request.toString());

        source = "{\"name\":\"" + randomUnicodeOfLength(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING) + "\"}";
        request.source(source, XContentType.JSON);
        int actualBytes = source.getBytes("UTF-8").length;
        assertEquals("index {[index][type][null], source[n/a, actual length: [" + new ByteSizeValue(actualBytes).toString() +
        assertEquals("index {[index][_doc][null], source[n/a, actual length: [" + new ByteSizeValue(actualBytes).toString() +
            "], max length: " + new ByteSizeValue(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING).toString() + "]}", request.toString());
    }

    public void testRejectsEmptyStringPipeline() {
        IndexRequest request = new IndexRequest("index", "type");
        IndexRequest request = new IndexRequest("index");
        request.source("{}", XContentType.JSON);
        request.setPipeline("");
        ActionRequestValidationException validate = request.validate();
@@ -659,6 +659,6 @@ public class UpdateRequestTests extends ESTestCase {
        request = new UpdateRequest("test", "type1", "1").fromXContent(
            createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}")));
        assertThat(request.toString(), equalTo("update {[test][type1][1], doc_as_upsert[false], "
            + "doc[index {[null][null][null], source[{\"body\":\"bar\"}]}], scripted_upsert[false], detect_noop[true]}"));
            + "doc[index {[null][_doc][null], source[{\"body\":\"bar\"}]}], scripted_upsert[false], detect_noop[true]}"));
    }
}
@@ -21,17 +21,40 @@ package org.elasticsearch.rest.action.document;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.test.rest.RestActionTestCase;
import org.junit.Before;

import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;

public class RestIndexActionTests extends ESTestCase {
public class RestIndexActionTests extends RestActionTestCase {

    private RestIndexAction action;

    @Before
    public void setUpAction() {
        action = new RestIndexAction(Settings.EMPTY, controller());
    }

    public void testTypeInPath() {
        RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
            .withMethod(RestRequest.Method.PUT)
            .withPath("/some_index/some_type/some_id")
            .build();
        dispatchRequest(deprecatedRequest);
        assertWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE);

        RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry())
            .withMethod(RestRequest.Method.PUT)
            .withPath("/some_index/_doc/some_id")
            .build();
        dispatchRequest(validRequest);
    }

    public void testCreateOpTypeValidation() throws Exception {
        Settings settings = settings(Version.CURRENT).build();
        RestIndexAction.CreateHandler create = new RestIndexAction(settings, mock(RestController.class)).new CreateHandler(settings);
        RestIndexAction.CreateHandler create = action.new CreateHandler(settings);

        String opType = randomFrom("CREATE", null);
        create.validateOpType(opType);
@@ -295,21 +295,21 @@ public class SimpleVersioningIT extends ESIntegTestCase {
        assertThat(indexResponse.getPrimaryTerm(), equalTo(1L));

        assertThrows(
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(10, 1).execute(),
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(1).execute(),
            VersionConflictEngineException.class);

        assertThrows(
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(10, 2).execute(),
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(10).setIfPrimaryTerm(2).execute(),
            VersionConflictEngineException.class);

        assertThrows(
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfMatch(1, 2).execute(),
            client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(1).setIfPrimaryTerm(2).execute(),
            VersionConflictEngineException.class);


        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(10, 1).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(10, 2).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(10).setIfPrimaryTerm(1), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(10).setIfPrimaryTerm(2), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class);

        client().admin().indices().prepareRefresh().execute().actionGet();
        for (int i = 0; i < 10; i++) {

@@ -331,19 +331,19 @@ public class SimpleVersioningIT extends ESIntegTestCase {
            assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND));
        }

        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setIfMatch(1, 1).execute().actionGet();
        DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1).get();
        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
        assertThat(deleteResponse.getSeqNo(), equalTo(2L));
        assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L));

        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 1).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(3, 2).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(1), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(3).setIfPrimaryTerm(12), VersionConflictEngineException.class);
        assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(1).setIfPrimaryTerm(2), VersionConflictEngineException.class);


        // This is intricate - the object was deleted but a delete transaction was with the right version. We add another one
        // and thus the transaction is increased.
        deleteResponse = client().prepareDelete("test", "type", "1").setIfMatch(2, 1).execute().actionGet();
        deleteResponse = client().prepareDelete("test", "type", "1").setIfSeqNo(2).setIfPrimaryTerm(1).get();
        assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
        assertThat(deleteResponse.getSeqNo(), equalTo(3L));
        assertThat(deleteResponse.getPrimaryTerm(), equalTo(1L));
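The versioning test now expresses optimistic concurrency through the dedicated seq-no and primary-term setters rather than the removed `setIfMatch(seqNo, term)` shorthand. The same pattern in isolation, as a hedged sketch with illustrative values:

    // Sketch: a compare-and-set delete. If the document has advanced past seq-no 2,
    // or the primary term changed, the call fails with VersionConflictEngineException.
    DeleteResponse response = client().prepareDelete("test", "type", "1")
            .setIfSeqNo(2)
            .setIfPrimaryTerm(1)
            .get();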
@@ -58,10 +58,13 @@ public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase {
    }

    @After
    public void remoteLocalRemote() throws Exception {
    public void purgeCCRMetadata() throws Exception {
        ClusterService clusterService = getInstanceFromNode(ClusterService.class);
        removeCCRRelatedMetadataFromClusterState(clusterService);
    }

    @After
    public void removeLocalRemote() {
        ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
        updateSettingsRequest.transientSettings(Settings.builder().put("cluster.remote.local.seeds", (String) null));
        assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet());

@@ -70,8 +73,8 @@ public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase {
    protected ResumeFollowAction.Request getResumeFollowRequest(String followerIndex) {
        ResumeFollowAction.Request request = new ResumeFollowAction.Request();
        request.setFollowerIndex(followerIndex);
        request.setMaxRetryDelay(TimeValue.timeValueMillis(10));
        request.setReadPollTimeout(TimeValue.timeValueMillis(10));
        request.setMaxRetryDelay(TimeValue.timeValueMillis(1));
        request.setReadPollTimeout(TimeValue.timeValueMillis(1));
        return request;
    }
@@ -6,6 +6,7 @@

package org.elasticsearch.xpack.ccr;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;

@@ -23,6 +24,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;

@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36764")
public class LocalIndexFollowingIT extends CcrSingleNodeTestCase {

    public void testFollowIndex() throws Exception {
@@ -6,6 +6,7 @@

package org.elasticsearch.xpack.ccr.action;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;

@@ -13,11 +14,13 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData;
import org.elasticsearch.xpack.CcrSingleNodeTestCase;
import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;

import java.util.Comparator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

@@ -31,6 +34,7 @@ import static org.hamcrest.collection.IsEmptyCollection.empty;
 * Test scope is important to ensure that other tests added to this suite do not interfere with the expectation in
 * testStatsWhenNoPersistentTasksMetaDataExists that the cluster state does not contain any persistent tasks metadata.
 */
@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36764")
public class FollowStatsIT extends CcrSingleNodeTestCase {

    /**

@@ -106,6 +110,12 @@ public class FollowStatsIT extends CcrSingleNodeTestCase {

        assertAcked(client().execute(PauseFollowAction.INSTANCE, new PauseFollowAction.Request("follower1")).actionGet());
        assertAcked(client().execute(PauseFollowAction.INSTANCE, new PauseFollowAction.Request("follower2")).actionGet());

        assertBusy(() -> {
            List<FollowStatsAction.StatsResponse> responseList =
                client().execute(CcrStatsAction.INSTANCE, new CcrStatsAction.Request()).actionGet().getFollowStats().getStatsResponses();
            assertThat(responseList.size(), equalTo(0));
        });
    }

}
@@ -318,11 +318,11 @@ public class FollowingEngineTests extends ESTestCase {
            Engine.Index index = (Engine.Index) op;
            result = engine.index(new Engine.Index(index.uid(), index.parsedDoc(), index.seqNo(), primaryTerm, index.version(),
                versionType, origin, index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(),
                index.getIfSeqNoMatch(), index.getIfPrimaryTermMatch()));
                index.getIfSeqNo(), index.getIfPrimaryTerm()));
        } else if (op instanceof Engine.Delete) {
            Engine.Delete delete = (Engine.Delete) op;
            result = engine.delete(new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), primaryTerm,
                delete.version(), versionType, origin, delete.startTime(), delete.getIfSeqNoMatch(), delete.getIfPrimaryTermMatch()));
                delete.version(), versionType, origin, delete.startTime(), delete.getIfSeqNo(), delete.getIfPrimaryTerm()));
        } else {
            Engine.NoOp noOp = (Engine.NoOp) op;
            result = engine.noOp(new Engine.NoOp(noOp.seqNo(), primaryTerm, origin, noOp.startTime(), noOp.reason()));
@@ -0,0 +1,244 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.ml.annotations;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;

import java.io.IOException;
import java.util.Date;
import java.util.Objects;

public class Annotation implements ToXContentObject, Writeable {

    public static final ParseField ANNOTATION = new ParseField("annotation");
    public static final ParseField CREATE_TIME = new ParseField("create_time");
    public static final ParseField CREATE_USERNAME = new ParseField("create_username");
    public static final ParseField TIMESTAMP = new ParseField("timestamp");
    public static final ParseField END_TIMESTAMP = new ParseField("end_timestamp");
    public static final ParseField MODIFIED_TIME = new ParseField("modified_time");
    public static final ParseField MODIFIED_USERNAME = new ParseField("modified_username");
    public static final ParseField TYPE = new ParseField("type");

    public static final ObjectParser<Annotation, Void> PARSER = new ObjectParser<>(TYPE.getPreferredName(), true, Annotation::new);

    static {
        PARSER.declareString(Annotation::setAnnotation, ANNOTATION);
        PARSER.declareField(Annotation::setCreateTime,
            p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), CREATE_TIME, ObjectParser.ValueType.VALUE);
        PARSER.declareString(Annotation::setCreateUsername, CREATE_USERNAME);
        PARSER.declareField(Annotation::setTimestamp,
            p -> TimeUtils.parseTimeField(p, TIMESTAMP.getPreferredName()), TIMESTAMP, ObjectParser.ValueType.VALUE);
        PARSER.declareField(Annotation::setEndTimestamp,
            p -> TimeUtils.parseTimeField(p, END_TIMESTAMP.getPreferredName()), END_TIMESTAMP, ObjectParser.ValueType.VALUE);
        PARSER.declareString(Annotation::setJobId, Job.ID);
        PARSER.declareField(Annotation::setModifiedTime,
            p -> TimeUtils.parseTimeField(p, MODIFIED_TIME.getPreferredName()), MODIFIED_TIME, ObjectParser.ValueType.VALUE);
        PARSER.declareString(Annotation::setModifiedUsername, MODIFIED_USERNAME);
        PARSER.declareString(Annotation::setType, TYPE);
    }

    private String annotation;
    private Date createTime;
    private String createUsername;
    private Date timestamp;
    private Date endTimestamp;
    /**
     * Unlike most ML classes, this may be <code>null</code> or wildcarded
     */
    private String jobId;
    private Date modifiedTime;
    private String modifiedUsername;
    private String type;

    private Annotation() {
    }

    public Annotation(String annotation, Date createTime, String createUsername, Date timestamp, Date endTimestamp, String jobId,
                      Date modifiedTime, String modifiedUsername, String type) {
        this.annotation = Objects.requireNonNull(annotation);
        this.createTime = Objects.requireNonNull(createTime);
        this.createUsername = Objects.requireNonNull(createUsername);
        this.timestamp = Objects.requireNonNull(timestamp);
        this.endTimestamp = endTimestamp;
        this.jobId = jobId;
        this.modifiedTime = modifiedTime;
        this.modifiedUsername = modifiedUsername;
        this.type = Objects.requireNonNull(type);
    }

    public Annotation(StreamInput in) throws IOException {
        annotation = in.readString();
        createTime = new Date(in.readLong());
        createUsername = in.readString();
        timestamp = new Date(in.readLong());
        if (in.readBoolean()) {
            endTimestamp = new Date(in.readLong());
        } else {
            endTimestamp = null;
        }
        jobId = in.readOptionalString();
        if (in.readBoolean()) {
            modifiedTime = new Date(in.readLong());
        } else {
            modifiedTime = null;
        }
        modifiedUsername = in.readOptionalString();
        type = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(annotation);
        out.writeLong(createTime.getTime());
        out.writeString(createUsername);
        out.writeLong(timestamp.getTime());
        if (endTimestamp != null) {
            out.writeBoolean(true);
            out.writeLong(endTimestamp.getTime());
        } else {
            out.writeBoolean(false);
        }
        out.writeOptionalString(jobId);
        if (modifiedTime != null) {
            out.writeBoolean(true);
            out.writeLong(modifiedTime.getTime());
        } else {
            out.writeBoolean(false);
        }
        out.writeOptionalString(modifiedUsername);
        out.writeString(type);

    }

    public String getAnnotation() {
        return annotation;
    }

    public void setAnnotation(String annotation) {
        this.annotation = Objects.requireNonNull(annotation);
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = Objects.requireNonNull(createTime);
    }

    public String getCreateUsername() {
        return createUsername;
    }

    public void setCreateUsername(String createUsername) {
        this.createUsername = Objects.requireNonNull(createUsername);
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Date timestamp) {
        this.timestamp = Objects.requireNonNull(timestamp);
    }

    public Date getEndTimestamp() {
        return endTimestamp;
    }

    public void setEndTimestamp(Date endTimestamp) {
        this.endTimestamp = endTimestamp;
    }

    public String getJobId() {
        return jobId;
    }

    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    public Date getModifiedTime() {
        return modifiedTime;
    }

    public void setModifiedTime(Date modifiedTime) {
        this.modifiedTime = modifiedTime;
    }

    public String getModifiedUsername() {
        return modifiedUsername;
    }

    public void setModifiedUsername(String modifiedUsername) {
        this.modifiedUsername = modifiedUsername;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = Objects.requireNonNull(type);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ANNOTATION.getPreferredName(), annotation);
        builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.getTime());
        builder.field(CREATE_USERNAME.getPreferredName(), createUsername);
        builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
        if (endTimestamp != null) {
            builder.timeField(END_TIMESTAMP.getPreferredName(), END_TIMESTAMP.getPreferredName() + "_string", endTimestamp.getTime());
        }
        if (jobId != null) {
            builder.field(Job.ID.getPreferredName(), jobId);
        }
        if (modifiedTime != null) {
            builder.timeField(MODIFIED_TIME.getPreferredName(), MODIFIED_TIME.getPreferredName() + "_string", modifiedTime.getTime());
        }
        if (modifiedUsername != null) {
            builder.field(MODIFIED_USERNAME.getPreferredName(), modifiedUsername);
        }
        builder.field(TYPE.getPreferredName(), type);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(annotation, createTime, createUsername, timestamp, endTimestamp, jobId, modifiedTime, modifiedUsername, type);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        Annotation other = (Annotation) obj;
        return Objects.equals(annotation, other.annotation) &&
            Objects.equals(createTime, other.createTime) &&
            Objects.equals(createUsername, other.createUsername) &&
            Objects.equals(timestamp, other.timestamp) &&
            Objects.equals(endTimestamp, other.endTimestamp) &&
            Objects.equals(jobId, other.jobId) &&
            Objects.equals(modifiedTime, other.modifiedTime) &&
            Objects.equals(modifiedUsername, other.modifiedUsername) &&
            Objects.equals(type, other.type);
    }
}
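`Annotation` is a plain `Writeable`/`ToXContentObject` carrier for ML annotations. A minimal sketch of building one and rendering it to JSON; the field values, the `_xpack` username and the surrounding context (somewhere `XContentFactory.jsonBuilder()` is available) are illustrative assumptions, not part of the commit:

    // Sketch: a system annotation on one job, with no end time or modification history.
    // annotation, createTime, createUsername, timestamp and type must be non-null.
    Annotation annotation = new Annotation("Possible data gap", new Date(), "_xpack",
            new Date(), null, "my-job", null, null, "annotation");
    XContentBuilder json = annotation.toXContent(
            XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS);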
@@ -0,0 +1,147 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.ml.annotations;

import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.MachineLearningField;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;

import java.io.IOException;
import java.util.SortedMap;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;

public class AnnotationIndex {

    public static final String READ_ALIAS_NAME = ".ml-annotations-read";
    public static final String WRITE_ALIAS_NAME = ".ml-annotations-write";
    // Exposed for testing, but always use the aliases in non-test code
    public static final String INDEX_NAME = ".ml-annotations-6";

    /**
     * Create the .ml-annotations index with correct mappings.
     * This index is read and written by the UI results views,
     * so needs to exist when there might be ML results to view.
     */
    public static void createAnnotationsIndex(Settings settings, Client client, ClusterState state,
                                              final ActionListener<Boolean> finalListener) {

        final ActionListener<Boolean> createAliasListener = ActionListener.wrap(success -> {
            final IndicesAliasesRequest request = client.admin().indices().prepareAliases()
                .addAlias(INDEX_NAME, READ_ALIAS_NAME)
                .addAlias(INDEX_NAME, WRITE_ALIAS_NAME).request();
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, request,
                ActionListener.<AcknowledgedResponse>wrap(r -> finalListener.onResponse(r.isAcknowledged()), finalListener::onFailure),
                client.admin().indices()::aliases);
        }, finalListener::onFailure);

        // Only create the index or aliases if some other ML index exists - saves clutter if ML is never used.
        SortedMap<String, AliasOrIndex> mlLookup = state.getMetaData().getAliasAndIndexLookup().tailMap(".ml");
        if (mlLookup.isEmpty() == false && mlLookup.firstKey().startsWith(".ml")) {

            // Create the annotations index if it doesn't exist already.
            if (mlLookup.containsKey(INDEX_NAME) == false) {

                final TimeValue delayedNodeTimeOutSetting;
                // Whether we are using native process is a good way to detect whether we are in dev / test mode:
                if (MachineLearningField.AUTODETECT_PROCESS.get(settings)) {
                    delayedNodeTimeOutSetting = UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(settings);
                } else {
                    delayedNodeTimeOutSetting = TimeValue.ZERO;
                }

                CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME);
                try (XContentBuilder annotationsMapping = AnnotationIndex.annotationsMapping()) {
                    createIndexRequest.mapping(ElasticsearchMappings.DOC_TYPE, annotationsMapping);
                    createIndexRequest.settings(Settings.builder()
                        .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
                        .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting));

                    executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest,
                        ActionListener.<CreateIndexResponse>wrap(
                            r -> createAliasListener.onResponse(r.isAcknowledged()),
                            e -> {
                                // Possible that the index was created while the request was executing,
                                // so we need to handle that possibility
                                if (e instanceof ResourceAlreadyExistsException) {
                                    // Create the alias
                                    createAliasListener.onResponse(true);
                                } else {
                                    finalListener.onFailure(e);
                                }
                            }
                        ), client.admin().indices()::create);
                } catch (IOException e) {
                    finalListener.onFailure(e);
                }
                return;
            }

            // Recreate the aliases if they've gone even though the index still exists.
            if (mlLookup.containsKey(READ_ALIAS_NAME) == false || mlLookup.containsKey(WRITE_ALIAS_NAME) == false) {
                createAliasListener.onResponse(true);
                return;
            }
        }

        // Nothing to do, but respond to the listener
        finalListener.onResponse(false);
    }

    public static XContentBuilder annotationsMapping() throws IOException {
        return jsonBuilder()
            .startObject()
                .startObject(ElasticsearchMappings.DOC_TYPE)
                    .startObject(ElasticsearchMappings.PROPERTIES)
                        .startObject(Annotation.ANNOTATION.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.TEXT)
                        .endObject()
                        .startObject(Annotation.CREATE_TIME.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE)
                        .endObject()
                        .startObject(Annotation.CREATE_USERNAME.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD)
                        .endObject()
                        .startObject(Annotation.TIMESTAMP.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE)
                        .endObject()
                        .startObject(Annotation.END_TIMESTAMP.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE)
                        .endObject()
                        .startObject(Job.ID.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD)
                        .endObject()
                        .startObject(Annotation.MODIFIED_TIME.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.DATE)
                        .endObject()
                        .startObject(Annotation.MODIFIED_USERNAME.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD)
                        .endObject()
                        .startObject(Annotation.TYPE.getPreferredName())
                            .field(ElasticsearchMappings.TYPE, ElasticsearchMappings.KEYWORD)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject();
    }
}
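A hedged sketch of the calling pattern `createAnnotationsIndex` expects, mirroring the `MlInitializationService` change further down in this commit; the `logger`, `settings`, `client` and `clusterState` references are assumed to come from the caller:

    // Sketch: fire-and-forget creation; the listener receives true only when the
    // index or its aliases were actually created on this invocation.
    AnnotationIndex.createAnnotationsIndex(settings, client, clusterState, ActionListener.wrap(
            created -> {
                if (created) {
                    logger.info("created ML annotations index and aliases");
                }
            },
            e -> logger.error("error creating ML annotations index or aliases", e)));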
@@ -136,13 +136,22 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene
        .put(UsernamesField.APM_ROLE, new RoleDescriptor(UsernamesField.APM_ROLE,
            new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA))
        .put("machine_learning_user", new RoleDescriptor("machine_learning_user", new String[] { "monitor_ml" },
            new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".ml-anomalies*",
                ".ml-notifications").privileges("view_index_metadata", "read").build() },
            new RoleDescriptor.IndicesPrivileges[] {
                RoleDescriptor.IndicesPrivileges.builder().indices(".ml-anomalies*", ".ml-notifications*")
                    .privileges("view_index_metadata", "read").build(),
                RoleDescriptor.IndicesPrivileges.builder().indices(".ml-annotations*")
                    .privileges("view_index_metadata", "read", "write").build()
            },
            null, MetadataUtils.DEFAULT_RESERVED_METADATA))
        .put("machine_learning_admin", new RoleDescriptor("machine_learning_admin", new String[] { "manage_ml" },
            new RoleDescriptor.IndicesPrivileges[] {
                RoleDescriptor.IndicesPrivileges.builder().indices(".ml-*").privileges("view_index_metadata", "read")
                    .build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA))
                RoleDescriptor.IndicesPrivileges.builder()
                    .indices(".ml-anomalies*", ".ml-notifications*", ".ml-state*", ".ml-meta*")
                    .privileges("view_index_metadata", "read").build(),
                RoleDescriptor.IndicesPrivileges.builder().indices(".ml-annotations*")
                    .privileges("view_index_metadata", "read", "write").build()
            },
            null, MetadataUtils.DEFAULT_RESERVED_METADATA))
        .put("watcher_admin", new RoleDescriptor("watcher_admin", new String[] { "manage_watcher" },
            new RoleDescriptor.IndicesPrivileges[] {
                RoleDescriptor.IndicesPrivileges.builder().indices(Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME,
@@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.ml.annotations;

import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;

import java.util.Date;

public class AnnotationTests extends AbstractSerializingTestCase<Annotation> {

    @Override
    protected Annotation doParseInstance(XContentParser parser) {
        return Annotation.PARSER.apply(parser, null);
    }

    @Override
    protected Annotation createTestInstance() {
        return new Annotation(randomAlphaOfLengthBetween(100, 1000),
            new Date(randomNonNegativeLong()),
            randomAlphaOfLengthBetween(5, 20),
            new Date(randomNonNegativeLong()),
            randomBoolean() ? new Date(randomNonNegativeLong()) : null,
            randomBoolean() ? randomAlphaOfLengthBetween(10, 30) : null,
            randomBoolean() ? new Date(randomNonNegativeLong()) : null,
            randomBoolean() ? randomAlphaOfLengthBetween(5, 20) : null,
            randomAlphaOfLengthBetween(10, 15));
    }

    @Override
    protected Writeable.Reader<Annotation> instanceReader() {
        return Annotation::new;
    }
}
@@ -106,7 +106,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
        Request createAirlineDataRequest = new Request("PUT", "/airline-data");
        createAirlineDataRequest.setJsonEntity("{"
            + " \"mappings\": {"
            + " \"response\": {"
            + " \"_doc\": {"
            + " \"properties\": {"
            + " \"time\": { \"type\":\"date\"},"
            + " \"airline\": { \"type\":\"keyword\"},"

@@ -116,10 +116,10 @@
            + " }"
            + "}");
        client().performRequest(createAirlineDataRequest);
        Request airlineData1 = new Request("PUT", "/airline-data/response/1");
        Request airlineData1 = new Request("PUT", "/airline-data/_doc/1");
        airlineData1.setJsonEntity("{\"time\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}");
        client().performRequest(airlineData1);
        Request airlineData2 = new Request("PUT", "/airline-data/response/2");
        Request airlineData2 = new Request("PUT", "/airline-data/_doc/2");
        airlineData2.setJsonEntity("{\"time\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}");
        client().performRequest(airlineData2);

@@ -265,7 +265,7 @@
        xContentBuilder.startObject();
        xContentBuilder.field("job_id", jobId);
        xContentBuilder.array("indexes", "airline-data");
        xContentBuilder.array("types", "response");
        xContentBuilder.array("types", "_doc");
        xContentBuilder.field("_source", true);
        xContentBuilder.endObject();
        Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
@@ -195,14 +195,14 @@ public class MlJobIT extends ESRestTestCase {

        { //create jobId1 docs
            String id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1234", 300);
            Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
            Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_doc/" + id);
            createResultRequest.setJsonEntity(String.format(Locale.ROOT,
                "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
                jobId1, "1234", 1));
            client().performRequest(createResultRequest);

            id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1236", 300);
            createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
            createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_doc/" + id);
            createResultRequest.setJsonEntity(String.format(Locale.ROOT,
                "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
                jobId1, "1236", 1));

@@ -220,14 +220,14 @@ public class MlJobIT extends ESRestTestCase {
        }
        { //create jobId2 docs
            String id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId2, "1234", 300);
            Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + "/doc/" + id);
            Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + "/_doc/" + id);
            createResultRequest.setJsonEntity(String.format(Locale.ROOT,
                "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
                jobId2, "1234", 1));
            client().performRequest(createResultRequest);

            id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId2, "1236", 300);
            createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + "/doc/" + id);
            createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + "/_doc/" + id);
            createResultRequest.setJsonEntity(String.format(Locale.ROOT,
                "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
                jobId2, "1236", 1));

@@ -510,20 +510,20 @@ public class MlJobIT extends ESRestTestCase {
        assertThat(indicesBeforeDelete, containsString(indexName + "-002"));

        // Add some documents to each index to make sure the DBQ clears them out
        Request createDoc0 = new Request("PUT", indexName + "/doc/" + 123);
        Request createDoc0 = new Request("PUT", indexName + "/_doc/" + 123);
        createDoc0.setJsonEntity(String.format(Locale.ROOT,
            "{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"bucket_span\":%d, \"result_type\":\"record\"}",
            jobId, 123, 1));
        client().performRequest(createDoc0);
        Request createDoc1 = new Request("PUT", indexName + "-001/doc/" + 123);
        Request createDoc1 = new Request("PUT", indexName + "-001/_doc/" + 123);
        createDoc1.setEntity(createDoc0.getEntity());
        client().performRequest(createDoc1);
        Request createDoc2 = new Request("PUT", indexName + "-002/doc/" + 123);
        Request createDoc2 = new Request("PUT", indexName + "-002/_doc/" + 123);
        createDoc2.setEntity(createDoc0.getEntity());
        client().performRequest(createDoc2);

        // Also index a few through the alias for the first job
        Request createDoc3 = new Request("PUT", indexName + "/doc/" + 456);
        Request createDoc3 = new Request("PUT", indexName + "/_doc/" + 456);
        createDoc3.setEntity(createDoc0.getEntity());
        client().performRequest(createDoc3);
@@ -182,7 +182,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
            .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);

        createIndex("painless", settings.build());
        Request createDoc = new Request("PUT", "/painless/test/1");
        Request createDoc = new Request("PUT", "/painless/_doc/1");
        createDoc.setJsonEntity("{\"test\": \"test\"}");
        createDoc.addParameter("refresh", "true");
        client().performRequest(createDoc);

@@ -262,7 +262,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
            .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
            .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);

        createIndex("painless", settings.build(), "\"test\": { \"properties\": { \"domain\": { \"type\": \"keyword\" }," +
        createIndex("painless", settings.build(), "\"_doc\": { \"properties\": { \"domain\": { \"type\": \"keyword\" }," +
            "\"time\": { \"type\": \"date\" } } }");

        // Index some data

@@ -280,13 +280,13 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
            if (i == 64) {
                // Anomaly has 100 docs, but we don't care about the value
                for (int j = 0; j < 100; j++) {
                    Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO() + "_" + j);
                    Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO() + "_" + j);
                    createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}");
                    client().performRequest(createDocRequest);
                }
            } else {
                // Non-anomalous values will be what's seen when the anomaly is reported
                Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO());
                Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO());
                createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}");
                client().performRequest(createDocRequest);
            }

@@ -300,7 +300,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
            "{\n" +
            " \"job_id\":\"hrd-split-job\",\n" +
            " \"indexes\":[\"painless\"],\n" +
            " \"types\":[\"test\"],\n" +
            " \"types\":[\"_doc\"],\n" +
            " \"script_fields\": {\n" +
            " \"domain_split\": {\n" +
            " \"script\": \"return domainSplit(doc['domain'].value, params);\"\n" +
@@ -424,7 +424,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
                jobResultsProvider,
                jobManager,
                autodetectProcessManager,
                new MlInitializationService(threadPool, clusterService, client),
                new MlInitializationService(settings, threadPool, clusterService, client),
                jobDataCountsPersister,
                datafeedManager,
                auditor,
@@ -5,23 +5,32 @@
 */
package org.elasticsearch.xpack.ml;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex;

class MlInitializationService implements ClusterStateListener {

    private static final Logger logger = LogManager.getLogger(MlInitializationService.class);

    private final Settings settings;
    private final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final Client client;

    private volatile MlDailyMaintenanceService mlDailyMaintenanceService;

    MlInitializationService(ThreadPool threadPool, ClusterService clusterService, Client client) {
    MlInitializationService(Settings settings, ThreadPool threadPool, ClusterService clusterService, Client client) {
        this.settings = settings;
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.client = client;

@@ -37,6 +46,13 @@ class MlInitializationService implements ClusterStateListener {

        if (event.localNodeMaster()) {
            installDailyMaintenanceService();
            AnnotationIndex.createAnnotationsIndex(settings, client, event.state(), ActionListener.wrap(
                r -> {
                    if (r) {
                        logger.info("Created ML annotations index and aliases");
                    }
                },
                e -> logger.error("Error creating ML annotations index or aliases", e)));
        } else {
            uninstallDailyMaintenanceService();
        }
@@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;

@@ -61,7 +62,7 @@ public class MlInitializationServiceTests extends ESTestCase {
    }

    public void testInitialize() {
        MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client);
        MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client);

        ClusterState cs = ClusterState.builder(new ClusterName("_name"))
            .nodes(DiscoveryNodes.builder()

@@ -76,7 +77,7 @@ public class MlInitializationServiceTests extends ESTestCase {
    }

    public void testInitialize_noMasterNode() {
        MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client);
        MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client);

        ClusterState cs = ClusterState.builder(new ClusterName("_name"))
            .nodes(DiscoveryNodes.builder()

@@ -89,7 +90,7 @@ public class MlInitializationServiceTests extends ESTestCase {
    }

    public void testInitialize_alreadyInitialized() {
        MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client);
        MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client);

        ClusterState cs = ClusterState.builder(new ClusterName("_name"))
            .nodes(DiscoveryNodes.builder()

@@ -107,7 +108,7 @@ public class MlInitializationServiceTests extends ESTestCase {
    }

    public void testNodeGoesFromMasterToNonMasterAndBack() {
        MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client);
        MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client);
        MlDailyMaintenanceService initialDailyMaintenanceService = mock(MlDailyMaintenanceService.class);
        initializationService.setDailyMaintenanceService(initialDailyMaintenanceService);
@@ -0,0 +1,91 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.integration;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex;
import org.elasticsearch.xpack.ml.LocalStateMachineLearning;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.MlSingleNodeTestCase;
import org.elasticsearch.xpack.ml.notifications.Auditor;
import org.junit.Before;

import java.util.Collection;
import java.util.List;

public class AnnotationIndexIT extends MlSingleNodeTestCase {

    @Override
    protected Settings nodeSettings() {
        Settings.Builder newSettings = Settings.builder();
        newSettings.put(super.nodeSettings());
        newSettings.put(XPackSettings.MONITORING_ENABLED.getKey(), false);
        newSettings.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
        newSettings.put(XPackSettings.WATCHER_ENABLED.getKey(), false);
        return newSettings.build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(LocalStateMachineLearning.class);
    }

    // TODO remove this when the jindex feature branches are merged, as this is in the base class then
    @Before
    public void waitForMlTemplates() throws Exception {
        // Block until the templates are installed
        assertBusy(() -> {
            ClusterState state = client().admin().cluster().prepareState().get().getState();
            assertTrue("Timed out waiting for the ML templates to be installed",
                    MachineLearning.allTemplatesInstalled(state));
        });
    }

    public void testNotCreatedWhenNoOtherMlIndices() {

        // Ask a few times to increase the chance of failure if the .ml-annotations index is created when no other ML index exists
        for (int i = 0; i < 10; ++i) {
            assertFalse(annotationsIndexExists());
            assertEquals(0, numberOfAnnotationsAliases());
        }
    }

    public void testCreatedWhenAfterOtherMlIndex() throws Exception {

        Auditor auditor = new Auditor(client(), "node_1");
        auditor.info("whatever", "blah");

        // Creating a document in the .ml-notifications index should cause .ml-annotations
        // to be created, as it should get created as soon as any other ML index exists

        assertBusy(() -> {
            assertTrue(annotationsIndexExists());
            assertEquals(2, numberOfAnnotationsAliases());
        });
    }

    private boolean annotationsIndexExists() {
        return client().admin().indices().prepareExists(AnnotationIndex.INDEX_NAME).get().isExists();
    }

    private int numberOfAnnotationsAliases() {
        int count = 0;
        ImmutableOpenMap<String, List<AliasMetaData>> aliases = client().admin().indices()
                .prepareGetAliases(AnnotationIndex.READ_ALIAS_NAME, AnnotationIndex.WRITE_ALIAS_NAME).get().getAliases();
        if (aliases != null) {
            for (ObjectObjectCursor<String, List<AliasMetaData>> entry : aliases) {
                count += entry.value.size();
            }
        }
        return count;
    }
}

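The count of 2 asserted above corresponds to one annotations index carrying both a read alias and a write alias. A minimal sketch of that target state, with index and alias names assumed for illustration (the real values live behind AnnotationIndex's constants, which this commit does not show):

    import org.elasticsearch.action.admin.indices.alias.Alias;
    import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;

    // Hypothetical: one annotations index with a read alias and a write alias.
    CreateIndexRequest createRequest = new CreateIndexRequest(".ml-annotations-6")
            .alias(new Alias(".ml-annotations-read"))    // READ_ALIAS_NAME (assumed)
            .alias(new Alias(".ml-annotations-write"));  // WRITE_ALIAS_NAME (assumed)
    client().admin().indices().create(createRequest).actionGet();
    // numberOfAnnotationsAliases() would then count 1 + 1 = 2 alias entries.
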
@@ -10,6 +10,7 @@ import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.action.document.RestIndexAction;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.sql.qa.cli.EmbeddedCli.SecurityConfig;
import org.junit.After;

@@ -59,6 +60,7 @@ public abstract class CliIntegrationTestCase extends ESRestTestCase {
    protected void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
        Request request = new Request("PUT", "/" + index + "/doc/1");
        request.addParameter("refresh", "true");
        request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
        XContentBuilder builder = JsonXContent.contentBuilder().startObject();
        body.accept(builder);
        builder.endObject();

@@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.action.document.RestIndexAction;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.sql.jdbc.EsDataSource;
import org.junit.After;

@@ -85,6 +86,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
    public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
        Request request = new Request("PUT", "/" + index + "/doc/" + documentId);
        request.addParameter("refresh", "true");
        request.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE));
        XContentBuilder builder = JsonXContent.contentBuilder().startObject();
        body.accept(builder);
        builder.endObject();

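Both helpers above still index into a custom `doc` mapping type, so they now have to register the deprecation warning the server emits for typed index calls. `expectWarnings` is an ESRestTestCase helper; a hedged sketch of equivalent wiring with the low-level client follows, where the warnings-handler behaviour is an assumption, not taken from this commit:

    import java.util.Collections;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.rest.action.document.RestIndexAction;

    // Sketch: fail the request unless the server returns exactly the expected
    // types deprecation warning (assumed to be what expectWarnings builds).
    Request request = new Request("PUT", "/my_index/doc/1");
    RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
    options.setWarningsHandler(warnings ->
            warnings.equals(Collections.singletonList(RestIndexAction.TYPES_DEPRECATION_MESSAGE)) == false);
    request.setOptions(options);
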
@@ -720,7 +720,7 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe
    }

    private void index(String... docs) throws IOException {
        Request request = new Request("POST", "/test/test/_bulk");
        Request request = new Request("POST", "/test/_doc/_bulk");
        request.addParameter("refresh", "true");
        StringBuilder bulk = new StringBuilder();
        for (String doc : docs) {

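The bulk endpoint here moves from the custom `test` type to the reserved `_doc` type. A sketch of the call the updated helper ends up issuing, with document ids and fields made up for illustration:

    // Newline-delimited bulk body; _doc in the URL replaces the old custom type.
    Request request = new Request("POST", "/test/_doc/_bulk");
    request.addParameter("refresh", "true");
    request.setJsonEntity(
            "{\"index\":{\"_id\":\"1\"}}\n" +
            "{\"text\":\"doc1\"}\n" +
            "{\"index\":{\"_id\":\"2\"}}\n" +
            "{\"text\":\"doc2\"}\n");
    client().performRequest(request);
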
@@ -38,7 +38,7 @@ public abstract class RestSqlUsageTestCase extends ESRestTestCase {
    );

    private enum ClientType {
        CANVAS, CLI, JDBC, ODBC, REST;
        CANVAS, CLI, JDBC, ODBC, ODBC32, ODBC64, REST;

        @Override
        public String toString() {
@@ -84,7 +84,7 @@ public abstract class RestSqlUsageTestCase extends ESRestTestCase {
            baseMetrics.put(metric.toString(), (Integer) featuresMetrics.get(metric.toString()));
        }

        // initialize the "base" metric values with whatever values are already recorder on ES
        // initialize the "base" metric values with whatever values are already recorded on ES
        baseClientTypeTotalQueries = ((Map<String,Integer>) queriesMetrics.get(clientType)).get("total");
        baseClientTypeFailedQueries = ((Map<String,Integer>) queriesMetrics.get(clientType)).get("failed");
        baseAllTotalQueries = ((Map<String,Integer>) queriesMetrics.get("_all")).get("total");
@@ -252,8 +252,14 @@ public abstract class RestSqlUsageTestCase extends ESRestTestCase {
    }

    private void runSql(String sql) throws IOException {
        String mode = (clientType.equals(ClientType.JDBC.toString()) || clientType.equals(ClientType.ODBC.toString())) ?
            clientType.toString() : Mode.PLAIN.toString();
        String mode = Mode.PLAIN.toString();
        if (clientType.equals(ClientType.JDBC.toString())) {
            mode = Mode.JDBC.toString();
        }
        if (clientType.startsWith(ClientType.ODBC.toString())) {
            mode = Mode.ODBC.toString();
        }

        runSql(mode, clientType, sql);
    }

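The rewritten selection reads as a cascade: PLAIN unless the client is JDBC, and any client id that starts with the ODBC prefix (odbc, odbc32, odbc64) reports in ODBC mode. A free-standing sketch with hypothetical lowercase ids (the enum's toString is assumed to lowercase):

    // Hypothetical standalone version of the cascade above.
    static String modeFor(String clientType) {
        String mode = "plain";
        if (clientType.equals("jdbc")) {
            mode = "jdbc";
        }
        if (clientType.startsWith("odbc")) { // matches odbc, odbc32 and odbc64
            mode = "odbc";
        }
        return mode;
    }
    // modeFor("odbc32") == "odbc", modeFor("canvas") == "plain"
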
@@ -6,15 +6,34 @@

package org.elasticsearch.xpack.sql.proto;

import java.util.Arrays;
import java.util.List;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;

public class RequestInfo {
    public static final String CLI = "cli";
    private static final String CANVAS = "canvas";
    public static final List<String> CLIENT_IDS = Arrays.asList(CLI, CANVAS);
    public static final String ODBC_32 = "odbc32";
    private static final String ODBC_64 = "odbc64";
    public static final Set<String> CLIENT_IDS;
    public static final Set<String> ODBC_CLIENT_IDS;

    static {
        Set<String> clientIds = new HashSet<>(4);
        clientIds.add(CLI);
        clientIds.add(CANVAS);
        clientIds.add(ODBC_32);
        clientIds.add(ODBC_64);

        Set<String> odbcClientIds = new HashSet<>(2);
        odbcClientIds.add(ODBC_32);
        odbcClientIds.add(ODBC_64);

        CLIENT_IDS = Collections.unmodifiableSet(clientIds);
        ODBC_CLIENT_IDS = Collections.unmodifiableSet(odbcClientIds);
    }

    private Mode mode;
    private String clientId;

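Wrapping the hand-built HashSets in Collections.unmodifiableSet gives shared constants that callers cannot mutate; a small usage sketch based on the ids declared above:

    boolean isOdbc = RequestInfo.ODBC_CLIENT_IDS.contains("odbc64"); // true
    boolean known = RequestInfo.CLIENT_IDS.contains("cli");          // true
    // Any attempt to mutate the shared constant fails fast:
    // RequestInfo.CLIENT_IDS.add("new-client"); // UnsupportedOperationException
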
@@ -56,7 +56,7 @@ public class RestSqlQueryAction extends BaseRestHandler {
        SqlQueryRequest sqlRequest;
        try (XContentParser parser = request.contentOrSourceParamParser()) {
            sqlRequest = SqlQueryRequest.fromXContent(parser);
        }
        }

        /*
         * Since we support {@link TextFormat} <strong>and</strong>

@@ -15,6 +15,9 @@ import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;

import static org.elasticsearch.xpack.sql.proto.RequestInfo.ODBC_CLIENT_IDS;
import static org.elasticsearch.xpack.sql.stats.QueryMetric.ODBC;

/**
 * Class encapsulating the metrics collected for ES SQL
 */
@@ -101,9 +104,18 @@ public class Metrics {

        // queries metrics
        for (Entry<QueryMetric, Map<OperationType, CounterMetric>> entry : opsByTypeMetrics.entrySet()) {
            String metricName = entry.getKey().toString();

            for (OperationType type : OperationType.values()) {
                counters.inc(QPREFIX + entry.getKey().toString() + "." + type.toString(), entry.getValue().get(type).count());
                counters.inc(QPREFIX + "_all." + type.toString(), entry.getValue().get(type).count());
                long metricCounter = entry.getValue().get(type).count();
                String operationTypeName = type.toString();

                counters.inc(QPREFIX + metricName + "." + operationTypeName, metricCounter);
                counters.inc(QPREFIX + "_all." + operationTypeName, metricCounter);
                // compute the ODBC total metric
                if (ODBC_CLIENT_IDS.contains(metricName)) {
                    counters.inc(QPREFIX + ODBC.toString() + "." + operationTypeName, metricCounter);
                }
            }
        }

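The effect of the extra branch is a roll-up: each ODBC sub-client increments its own counter plus an aggregate odbc counter. A self-contained sketch with plain maps, where the counter names and the record helper are hypothetical stand-ins for the Counters/CounterMetric classes:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class OdbcRollupSketch {
        private static final Set<String> ODBC_IDS = new HashSet<>(Arrays.asList("odbc32", "odbc64"));
        private final Map<String, Long> counters = new HashMap<>();

        void record(String metricName, String opType, long count) {
            counters.merge("queries." + metricName + "." + opType, count, Long::sum);
            counters.merge("queries._all." + opType, count, Long::sum);
            if (ODBC_IDS.contains(metricName)) {
                counters.merge("queries.odbc." + opType, count, Long::sum); // aggregate ODBC total
            }
        }

        public static void main(String[] args) {
            OdbcRollupSketch m = new OdbcRollupSketch();
            m.record("odbc32", "paging", 2);
            m.record("odbc64", "paging", 1);
            // queries.odbc32.paging == 2, queries.odbc64.paging == 1,
            // queries.odbc.paging == 3, queries._all.paging == 3
            System.out.println(m.counters);
        }
    }
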
@@ -7,12 +7,14 @@
package org.elasticsearch.xpack.sql.stats;

import org.elasticsearch.xpack.sql.proto.Mode;
import org.elasticsearch.xpack.sql.proto.RequestInfo;

import java.util.Locale;
import static org.elasticsearch.xpack.sql.proto.RequestInfo.ODBC_CLIENT_IDS;

public enum QueryMetric {
    CANVAS, CLI, JDBC, ODBC, REST;
    CANVAS, CLI, JDBC, ODBC, ODBC32, ODBC64, REST;

    public static QueryMetric fromString(String metric) {
        try {
            return QueryMetric.valueOf(metric.toUpperCase(Locale.ROOT));
@@ -27,6 +29,14 @@ public enum QueryMetric {
    }

    public static QueryMetric from(Mode mode, String clientId) {
        if (mode == Mode.ODBC) {
            // default to "odbc_32" if the client_id is not provided or it has a wrong value
            if (clientId == null || false == ODBC_CLIENT_IDS.contains(clientId)) {
                return fromString(RequestInfo.ODBC_32);
            } else {
                return fromString(clientId);
            }
        }
        return fromString(mode == Mode.PLAIN ? clientId : mode.toString());
    }
}

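A few illustrative resolutions of the new factory method, derived directly from the logic above:

    QueryMetric.from(Mode.ODBC, null);      // ODBC32 - the documented default
    QueryMetric.from(Mode.ODBC, "odbc64");  // ODBC64
    QueryMetric.from(Mode.ODBC, "bogus");   // ODBC32 - unknown ids fall back
    QueryMetric.from(Mode.PLAIN, "cli");    // CLI    - plain mode trusts the client id
    QueryMetric.from(Mode.JDBC, "cli");     // JDBC   - other modes ignore the client id
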
@@ -0,0 +1,115 @@
---
setup:
  - do:
      cluster.health:
        wait_for_status: yellow

  - do:
      index:
        index: my_test_index
        type: doc
        id: my_id
        refresh: true
        body: >
          {
            "key": "value"
          }

---
"Test search input includes hits by default":

  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "watch" : {
              "trigger": {
                "schedule" : { "cron" : "0 0 0 1 * ? 2099" }
              },
              "input": {
                "search" : {
                  "request" : {
                    "indices" : [ "my_test_index" ],
                    "body" : {
                      "query": {
                        "match_all" : {}
                      }
                    }
                  }
                }
              },
              "condition": {
                "compare": {
                  "ctx.payload.hits.total": {
                    "gt": 0
                  }
                }
              },
              "actions": {
                "logging" : {
                  "logging" : {
                    "text" : "Logging from a test"
                  }
                }
              }
            }
          }

  - match: { watch_record.result.condition.met: true }


---
"Test search transform includes hits by default":

  - do:
      xpack.watcher.execute_watch:
        body: >
          {
            "watch" : {
              "trigger": {
                "schedule" : { "cron" : "0 0 0 1 * ? 2099" }
              },
              "input": {
                "simple": {
                  "foo": "bar"
                }
              },
              "transform" : {
                "search" : {
                  "request" : {
                    "indices" : [ "my_test_index" ],
                    "body" : {
                      "query": {
                        "match_all" : {}
                      }
                    }
                  }
                }
              },
              "actions": {
                "indexme" : {
                  "condition": {
                    "compare": {
                      "ctx.payload.hits.total": {
                        "gt": 0
                      }
                    }
                  },
                  "index" : {
                    "index" : "my_test_index",
                    "doc_type" : "doc",
                    "doc_id": "my-id"
                  }
                }
              }
            }
          }

  - do:
      get:
        index: my_test_index
        type: doc
        id: my_id

  - match: { _source.key: "value" }

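Both YAML tests compare against ctx.payload.hits.total as a plain integer; that only works because watcher search requests now default to the pre-7.0 hit-count format (see the WatcherSearchTemplateRequest change that follows). The Java integration tests express the same condition programmatically, as in the BasicWatcherTests changes below:

    new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.GT, 0L);
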
@@ -41,7 +41,7 @@ public class WatcherSearchTemplateRequest implements ToXContentObject {
    private final IndicesOptions indicesOptions;
    private final Script template;
    private final BytesReference searchSource;
    private boolean restTotalHitsAsInt;
    private boolean restTotalHitsAsInt = true;

    public WatcherSearchTemplateRequest(String[] indices, String[] types, SearchType searchType, IndicesOptions indicesOptions,
                                        BytesReference searchSource) {
@@ -184,7 +184,8 @@ public class WatcherSearchTemplateRequest implements ToXContentObject {
        IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS;
        BytesReference searchSource = null;
        Script template = null;
        boolean totalHitsAsInt = false;
        // TODO this is to retain BWC compatibility in 7.0 and can be removed for 8.0
        boolean totalHitsAsInt = true;

        XContentParser.Token token;
        String currentFieldName = null;

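Flipping the default to true keeps watch payloads in the pre-7.0 shape: the search response carries `"hits": {"total": 3}` rather than the newer `"hits": {"total": {"value": 3, "relation": "eq"}}` object, so existing watches that address ctx.payload.hits.total keep working. Watches that want the new shape can still set rest_total_hits_as_int to false explicitly, and the TODO above marks the default for removal in 8.0.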
@@ -15,6 +15,7 @@ import java.util.Map;

import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class WatcherSearchTemplateRequestTests extends ESTestCase {

@@ -28,7 +29,26 @@ public class WatcherSearchTemplateRequestTests extends ESTestCase {
        assertTemplate(source, "custom-script", "painful", singletonMap("bar", "baz"));
    }

    private void assertTemplate(String source, String expectedScript, String expectedLang, Map<String, Object> expectedParams) {
    public void testDefaultHitCountsDefaults() throws IOException {
        assertHitCount("{}", true);
    }

    public void testDefaultHitCountsConfigured() throws IOException {
        boolean hitCountsAsInt = randomBoolean();
        String source = "{ \"rest_total_hits_as_int\" : " + hitCountsAsInt + " }";
        assertHitCount(source, hitCountsAsInt);
    }

    private void assertHitCount(String source, boolean expectedHitCountAsInt) throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            parser.nextToken();
            WatcherSearchTemplateRequest request = WatcherSearchTemplateRequest.fromXContent(parser, SearchType.QUERY_THEN_FETCH);
            assertThat(request.isRestTotalHitsAsint(), is(expectedHitCountAsInt));
        }
    }

    private void assertTemplate(String source, String expectedScript, String expectedLang, Map<String, Object> expectedParams)
            throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            parser.nextToken();
            WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values()));
@@ -36,8 +56,6 @@ public class WatcherSearchTemplateRequestTests extends ESTestCase {
            assertThat(result.getTemplate().getIdOrCode(), equalTo(expectedScript));
            assertThat(result.getTemplate().getLang(), equalTo(expectedLang));
            assertThat(result.getTemplate().getParams(), equalTo(expectedParams));
        } catch (IOException e) {
            fail("Failed to parse watch search request: " + e.getMessage());
        }
    }
}

@@ -340,7 +340,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase
            assertThat("could not find executed watch record for watch " + watchName, searchResponse.getHits().getTotalHits().value,
                    greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed));
            if (assertConditionMet) {
                assertThat((Integer) XContentMapValues.extractValue("result.input.payload.hits.total.value",
                assertThat((Integer) XContentMapValues.extractValue("result.input.payload.hits.total",
                        searchResponse.getHits().getAt(0).getSourceAsMap()), greaterThanOrEqualTo(1));
            }
        });

@@ -75,7 +75,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS)))
                        .input(searchInput(request))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 1L))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L))
                        .addAction("_logger", loggingAction("_logging")
                                .setCategory("_category")))
                .get();
@@ -95,7 +95,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS)))
                        .input(searchInput(searchRequest))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 1L)))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)))
                .get();
        timeWarp().trigger("_name");
        // The watch's condition won't meet because there is no data that matches with the query
@@ -119,7 +119,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(cron("0/1 * * * * ? 2020")))
                        .input(searchInput(searchRequest))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 1L)))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)))
                .get();
        assertThat(indexResponse.isCreated(), is(true));
        DeleteWatchResponse deleteWatchResponse = watcherClient.prepareDeleteWatch("_name").get();
@@ -180,7 +180,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .addAction("_id", indexAction("idx", "action"));

        watcherClient().preparePutWatch("_name")
                .setSource(source.condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 1L)))
                .setSource(source.condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)))
                .get();

        timeWarp().clock().fastForwardSeconds(5);
@@ -188,7 +188,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
        assertWatchWithMinimumPerformedActionsCount("_name", 0, false);

        watcherClient().preparePutWatch("_name")
                .setSource(source.condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 0L)))
                .setSource(source.condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 0L)))
                .get();

        timeWarp().clock().fastForwardSeconds(5);
@@ -199,7 +199,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
        watcherClient().preparePutWatch("_name")
                .setSource(source
                        .trigger(schedule(Schedules.cron("0/1 * * * * ? 2020")))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 0L)))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 0L)))
                .get();

        timeWarp().clock().fastForwardSeconds(5);
@@ -245,7 +245,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(interval(5, IntervalSchedule.Interval.Unit.SECONDS)))
                        .input(searchInput(request).extractKeys("hits.total.value"))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.EQ, 1L)))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)))
                .get();
        // in this watcher the condition will fail, because max_score isn't extracted, only total:
        watcherClient.preparePutWatch("_name2")
@@ -265,7 +265,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
        SearchResponse searchResponse = searchWatchRecords(builder -> builder.setQuery(matchQuery("watch_id", "_name1")));
        assertHitCount(searchResponse, 1);
        XContentSource source = xContentSource(searchResponse.getHits().getAt(0).getSourceRef());
        assertThat(source.getValue("result.input.payload.hits.total.value"), equalTo((Object) 1));
        assertThat(source.getValue("result.input.payload.hits.total"), equalTo((Object) 1));
    }

    public void testPutWatchWithNegativeSchedule() throws Exception {
@@ -349,7 +349,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(interval("5s")))
                        .input(searchInput(request))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.GTE, 3L)))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.GTE, 3L)))
                .get();

        logger.info("created watch [{}] at [{}]", watchName, new DateTime(Clock.systemUTC().millis()));

@@ -67,7 +67,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(cron("0/5 * * * * ? *")))
                        .input(searchInput(templateRequest(searchSource(), "events")))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.GT, 0L))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.GT, 0L))
                        .transform(searchTransform(templateRequest(searchSource(), "events")))
                        .addAction("_a1", indexAction("actions1", "doc"))
                        .addAction("_a2", indexAction("actions2", "doc"))
@@ -127,7 +127,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(cron("0/5 * * * * ? *")))
                        .input(searchInput(templateRequest(searchSource(), "events")))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.GT, 0L))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.GT, 0L))
                        .transform(searchTransform(templateRequest(searchSource(), "events")))
                        .addAction("_a1", indexAction("actions1", "doc"))
                        .addAction("_a2", indexAction("actions2", "doc"))
@@ -195,7 +195,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
                .setSource(watchBuilder()
                        .trigger(schedule(cron("0/5 * * * * ? *")))
                        .input(searchInput(templateRequest(searchSource(), "events")))
                        .condition(new CompareCondition("ctx.payload.hits.total.value", CompareCondition.Op.GT, 0L))
                        .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.GT, 0L))
                        .transform(searchTransform(templateRequest(searchSource(), "events")))
                        .addAction("_id", indexAction("actions", "action")))
                .get();

@@ -73,7 +73,7 @@ public class GlobalCheckpointSyncActionIT extends ESRestTestCase {
                builder.field("foo", i);
            }
            builder.endObject();
            Request indexRequest = new Request("PUT", "/test-index/test-type/" + i);
            Request indexRequest = new Request("PUT", "/test-index/_doc/" + i);
            indexRequest.setJsonEntity(Strings.toString(builder));
            client().performRequest(indexRequest);
        }

@@ -47,7 +47,7 @@ public class SmokeTestWatcherWithSecurityClientYamlTestSuiteIT extends ESClientY
            emptyList(), emptyMap());

        // create one document in this index, so we can test in the YAML tests, that the index cannot be accessed
        Request request = new Request("PUT", "/index_not_allowed_to_read/doc/1");
        Request request = new Request("PUT", "/index_not_allowed_to_read/_doc/1");
        request.setJsonEntity("{\"foo\":\"bar\"}");
        adminClient().performRequest(request);

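Like the SQL and global-checkpoint fixtures above, these tests move off custom mapping types ahead of their removal: single documents go to the reserved `_doc` endpoint. The pattern, with a hypothetical index name:

    // Typeless single-document indexing; _doc replaces per-index custom types in 7.x.
    Request indexRequest = new Request("PUT", "/my-index/_doc/1");
    indexRequest.setJsonEntity("{\"foo\":\"bar\"}");
    client().performRequest(indexRequest);
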