Merge branch 'master' into index-lifecycle

commit a771478940
@@ -20,10 +20,14 @@ package org.elasticsearch.gradle.precommit
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.file.FileCollection
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.plugins.quality.Checkstyle
import org.gradle.api.tasks.JavaExec
import org.gradle.api.tasks.StopExecutionException

/**
* Validation tasks which should be run before committing. These run before tests.

@@ -40,7 +44,11 @@ class PrecommitTasks {
project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
project.tasks.create('filepermissions', FilePermissionsTask.class),
project.tasks.create('jarHell', JarHellTask.class),
project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)]
project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
]

// Configure it but don't add it as a dependency yet
configureForbiddenApisCli(project)

// tasks with just tests don't need dependency licenses, so this flag makes adding
// the task optional

@@ -96,9 +104,58 @@ class PrecommitTasks {
}
Task forbiddenApis = project.tasks.findByName('forbiddenApis')
forbiddenApis.group = "" // clear group, so this does not show up under verification tasks

return forbiddenApis
}

private static Task configureForbiddenApisCli(Project project) {
project.configurations.create("forbiddenApisCliJar")
project.dependencies {
forbiddenApisCliJar 'de.thetaphi:forbiddenapis:2.5'
}
Task forbiddenApisCli = project.tasks.create('forbiddenApisCli')

project.sourceSets.forEach { sourceSet ->
forbiddenApisCli.dependsOn(
project.tasks.create(sourceSet.getTaskName('forbiddenApisCli', null), JavaExec) {
ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
dependsOn(buildResources)
classpath = project.files(
project.configurations.forbiddenApisCliJar,
sourceSet.compileClasspath,
sourceSet.runtimeClasspath
)
main = 'de.thetaphi.forbiddenapis.cli.CliMain'
executable = "${project.runtimeJavaHome}/bin/java"
args "-b", 'jdk-unsafe-1.8'
args "-b", 'jdk-deprecated-1.8'
args "-b", 'jdk-non-portable'
args "-b", 'jdk-system-out'
args "-f", buildResources.copy("forbidden/jdk-signatures.txt")
args "-f", buildResources.copy("forbidden/es-all-signatures.txt")
args "--suppressannotation", '**.SuppressForbidden'
if (sourceSet.name == 'test') {
args "-f", buildResources.copy("forbidden/es-test-signatures.txt")
args "-f", buildResources.copy("forbidden/http-signatures.txt")
} else {
args "-f", buildResources.copy("forbidden/es-server-signatures.txt")
}
dependsOn sourceSet.classesTaskName
doFirst {
// Forbidden APIs expects only existing dirs, and requires at least one
FileCollection existingOutputs = sourceSet.output.classesDirs
.filter { it.exists() }
if (existingOutputs.isEmpty()) {
throw new StopExecutionException("${sourceSet.name} has no outputs")
}
existingOutputs.forEach { args "-d", it }
}
}
)
}
return forbiddenApisCli
}

private static Task configureCheckstyle(Project project) {
// Always copy the checkstyle configuration files to 'buildDir/checkstyle' since the resources could be located in a jar
// file. If the resources are located in a jar, Gradle will fail when it tries to turn the URL into a file
@@ -22,15 +22,14 @@ package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.compile.JavaCompile

/**
* Configures the build to compile tests against Elasticsearch's test framework
* and run REST tests. Use BuildPlugin if you want to build main code as well

@@ -48,6 +47,7 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(RandomizedTestingPlugin)

project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
BuildPlugin.globalBuildInfo(project)
BuildPlugin.configureRepositories(project)
@@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin<Project> {
project.gradle.removeListener(batsPackagingReproListener)
}
if (project.extensions.esvagrant.boxes.contains(box)) {
packagingTest.dependsOn(batsPackagingTest)
// these tests are temporarily disabled for suse boxes while we debug an issue
// https://github.com/elastic/elasticsearch/issues/30295
if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) {
packagingTest.dependsOn(batsPackagingTest)
}
}
}

@@ -565,7 +569,11 @@ class VagrantTestPlugin implements Plugin<Project> {
project.gradle.removeListener(javaPackagingReproListener)
}
if (project.extensions.esvagrant.boxes.contains(box)) {
packagingTest.dependsOn(javaPackagingTest)
// these tests are temporarily disabled for suse boxes while we debug an issue
// https://github.com/elastic/elasticsearch/issues/30295
if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) {
packagingTest.dependsOn(javaPackagingTest)
}
}

/*
@@ -21,7 +21,6 @@ package org.elasticsearch.gradle;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;

@@ -31,6 +30,7 @@ import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.StopExecutionException;
import org.gradle.api.tasks.TaskAction;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;

@@ -82,14 +82,14 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
this.outputDir = outputDir;
}

public RegularFileProperty take(String resource) {
public File copy(String resource) {
if (getState().getExecuted() || getState().getExecuting()) {
throw new GradleException("buildResources can't be configured after the task ran. " +
"Make sure task is not used after configuration time"
);
}
resources.add(resource);
return getProject().getLayout().fileProperty(outputDir.file(resource));
return outputDir.file(resource).get().getAsFile();
}

@TaskAction

@@ -101,12 +101,13 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
.forEach(resourcePath -> {
Path destination = outputDir.get().file(resourcePath).getAsFile().toPath();
try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
Files.createDirectories(destination.getParent());
if (is == null) {
throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
}
Files.copy(is, destination);
} catch (IOException e) {
throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination);
throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
}
});
}
@@ -23,6 +23,7 @@ import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase {

public static final String PROJECT_NAME = "elasticsearch-build-resources";

@@ -59,6 +60,7 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe
.withArguments("clean", "sampleCopyAll", "-s", "-i")
.withPluginClasspath()
.build();

assertTaskSuccessfull(result, ":buildResources");
assertTaskSuccessfull(result, ":sampleCopyAll");
assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle.xml");
@@ -6,7 +6,7 @@ ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

buildResources {
take 'checkstyle.xml'
copy 'checkstyle.xml'
}

task sampleCopyAll(type: Sync) {

@@ -17,13 +17,13 @@ task sampleCopyAll(type: Sync) {

task sample {
// This does not work, task dependencies can't be providers
// dependsOn exportBuildResources.resource('minimumRuntimeVersion')
// dependsOn buildResources.resource('minimumRuntimeVersion')
// Nor does this, despite https://github.com/gradle/gradle/issues/3811
// dependsOn exportBuildResources.outputDir
// dependsOn buildResources.outputDir
// for now it's just
dependsOn buildResources
// we have to refference it at configuration time in order to be picked up
ext.checkstyle_suppressions = buildResources.take('checkstyle_suppressions.xml')
ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml')
doLast {
println "This task is using ${file(checkstyle_suppressions)}"
}

@@ -33,6 +33,6 @@ task noConfigAfterExecution {
dependsOn buildResources
doLast {
println "This should cause an error because we are refferencing " +
"${buildResources.take('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
"${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran."
}
}
@@ -48,7 +48,7 @@ import static java.util.Collections.emptySet;
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/licensing-apis.html">
* X-Pack Licensing APIs on elastic.co</a> for more information.
*/
public class LicenseClient {
public final class LicenseClient {

private final RestHighLevelClient restHighLevelClient;

@@ -98,9 +98,8 @@ public class LicenseClient {
response -> new GetLicenseResponse(convertResponseToJson(response)), listener, emptySet());
}

/**
* Converts an entire response into a json sting
* Converts an entire response into a json string
*
* This is useful for responses that we don't parse on the client side, but instead work as string
* such as in case of the license JSON
@@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client;

import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

import java.io.IOException;
import java.util.Collections;

/**
* A wrapper for the {@link RestHighLevelClient} that provides methods for
* accessing the Elastic License-related methods
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api.html">
* X-Pack Migration APIs on elastic.co</a> for more information.
*/
public final class MigrationClient {

private final RestHighLevelClient restHighLevelClient;

MigrationClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}

/**
* Get Migration Assistance for one or more indices
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options,
IndexUpgradeInfoResponse::fromXContent, Collections.emptySet());
}
}
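For orientation, a minimal usage sketch of the new entry point; the index name and the pre-built client are assumptions for illustration, not part of this change:

import java.io.IOException;
import java.util.Map;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;

public class MigrationAssistanceSketch {
    // `client` is an already-configured RestHighLevelClient; "my-old-index" is an invented index name.
    static void printUpgradeActions(RestHighLevelClient client) throws IOException {
        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("my-old-index");
        request.indicesOptions(IndicesOptions.lenientExpandOpen());
        // Sends GET _xpack/migration/assistance/my-old-index and parses the response body.
        IndexUpgradeInfoResponse response = client.migration().getAssistance(request, RequestOptions.DEFAULT);
        for (Map.Entry<String, UpgradeActionRequired> entry : response.getActions().entrySet()) {
            System.out.println(entry.getKey() + " requires: " + entry.getValue());
        }
    }
}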
@@ -114,6 +114,7 @@ import org.elasticsearch.protocol.xpack.indexlifecycle.StartILMRequest;
import org.elasticsearch.protocol.xpack.indexlifecycle.StopILMRequest;
import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;

@@ -1252,12 +1253,22 @@ final class RequestConverters {
.addPathPartAsIs("anomaly_detectors")
.addPathPart(putJobRequest.getJob().getId())
.build();

Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}

static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
EndpointBuilder endpointBuilder = new EndpointBuilder()
.addPathPartAsIs("_xpack/migration/assistance")
.addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
String endpoint = endpointBuilder.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params parameters = new Params(request);
parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
return request;
}

private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
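The endpoint built by getMigrationAssistance is easiest to see with concrete index names. Since the converter is package-private, this sketch is written in the style of the RequestConvertersTests method shown further down; the index names are invented:

// Same package as RequestConverters; invented index names.
public void testMigrationAssistanceEndpointSketch() {
    IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest("logs-2018", "web-2018");
    Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest);
    // Indices are appended as one comma-separated path segment; indices options become query parameters.
    assertEquals("GET", request.getMethod());
    assertEquals("/_xpack/migration/assistance/logs-2018,web-2018", request.getEndpoint());
}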
@@ -210,6 +210,7 @@ public class RestHighLevelClient implements Closeable {
private final XPackClient xPackClient = new XPackClient(this);
private final WatcherClient watcherClient = new WatcherClient(this);
private final LicenseClient licenseClient = new LicenseClient(this);
private final MigrationClient migrationClient = new MigrationClient(this);
private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this);

@@ -342,7 +343,21 @@ public class RestHighLevelClient implements Closeable {
* See the <a href="http://FILL-ME-IN-WE-HAVE-NO-DOCS-YET.com"> X-Pack APIs
* on elastic.co</a> for more information.
*/
public IndexLifecycleClient indexLifecycle() { return ilmClient; }
public IndexLifecycleClient indexLifecycle() {
return ilmClient;
}

/**
* Provides methods for accessing the Elastic Licensed Licensing APIs that
* are shipped with the default distribution of Elasticsearch. All of
* these APIs will 404 if run against the OSS distribution of Elasticsearch.
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api.html">
* Migration APIs on elastic.co</a> for more information.
*/
public MigrationClient migration() {
return migrationClient;
}

/**
* Provides methods for accessing the Elastic Licensed Machine Learning APIs that
@@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client;

import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

import java.io.IOException;

public class MigrationIT extends ESRestHighLevelClientTestCase {

public void testGetAssistance() throws IOException {
RestHighLevelClient client = highLevelClient();
{
IndexUpgradeInfoResponse response = client.migration().getAssistance(new IndexUpgradeInfoRequest(), RequestOptions.DEFAULT);
assertEquals(0, response.getActions().size());
}
{
client.indices().create(new CreateIndexRequest("test"), RequestOptions.DEFAULT);
IndexUpgradeInfoResponse response = client.migration().getAssistance(
new IndexUpgradeInfoRequest("test"), RequestOptions.DEFAULT);
assertEquals(0, response.getActions().size());
}
}
}
@@ -130,6 +130,7 @@ import org.elasticsearch.protocol.xpack.indexlifecycle.ExplainLifecycleRequest;
import org.elasticsearch.protocol.xpack.indexlifecycle.SetIndexLifecyclePolicyRequest;
import org.elasticsearch.protocol.xpack.indexlifecycle.StartILMRequest;
import org.elasticsearch.protocol.xpack.indexlifecycle.StopILMRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.repositories.fs.FsRepository;

@@ -2556,6 +2557,23 @@ public class RequestConvertersTests extends ESTestCase {
assertEquals(expectedParams, request.getParameters());
}

public void testGetMigrationAssistance() {
IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest();
String expectedEndpoint = "/_xpack/migration/assistance";
if (randomBoolean()) {
String[] indices = randomIndicesNames(1, 5);
upgradeInfoRequest.indices(indices);
expectedEndpoint += "/" + String.join(",", indices);
}
Map<String, String> expectedParams = new HashMap<>();
setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, expectedParams);
Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals(expectedEndpoint, request.getEndpoint());
assertNull(request.getEntity());
assertEquals(expectedParams, request.getParameters());
}

public void testXPackPutWatch() throws Exception {
PutWatchRequest putWatchRequest = new PutWatchRequest();
String watchId = randomAlphaOfLength(10);
@@ -758,6 +758,7 @@ public class RestHighLevelClientTests extends ESTestCase {
apiName.startsWith("license.") == false &&
apiName.startsWith("machine_learning.") == false &&
apiName.startsWith("watcher.") == false &&
apiName.startsWith("migration.") == false &&
apiName.startsWith("index_lifecycle.") == false) {
apiNotFound.add(apiName);
}
@@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.documentation;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;

import java.io.IOException;
import java.util.Map;

/**
* This class is used to generate the Java Migration API documentation.
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
*
* Where example is your tag name.
*
* Then in the documentation, you can extract what is between tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[example]
* --------------------------------------------------
*
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
*/
public class MigrationClientDocumentationIT extends ESRestHighLevelClientTestCase {

public void testGetAssistance() throws IOException {
RestHighLevelClient client = highLevelClient();

// tag::get-assistance-request
IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(); // <1>
// end::get-assistance-request

// tag::get-assistance-request-indices
request.indices("index1", "index2"); // <1>
// end::get-assistance-request-indices

request.indices(Strings.EMPTY_ARRAY);

// tag::get-assistance-request-indices-options
request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
// end::get-assistance-request-indices-options

// tag::get-assistance-execute
IndexUpgradeInfoResponse response = client.migration().getAssistance(request, RequestOptions.DEFAULT);
// end::get-assistance-execute

// tag::get-assistance-response
Map<String, UpgradeActionRequired> actions = response.getActions();
for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
String index = entry.getKey(); // <1>
UpgradeActionRequired actionRequired = entry.getValue(); // <2>
}
// end::get-assistance-response
}
}
@@ -49,7 +49,7 @@ XContentBuilder docBuilder = XContentFactory.jsonBuilder().startObject();
docBuilder.field("content", "This is amazing!");
docBuilder.endObject(); //End of the JSON root object

PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", docBuilder.bytes());
PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", BytesReference.bytes(docBuilder));

// Percolate, by executing the percolator query in the query dsl:
SearchResponse response = client().prepareSearch("myIndexName")
@@ -0,0 +1,49 @@
[[java-rest-high-migration-get-assistance]]
=== Migration Get Assistance

[[java-rest-high-migraton-get-assistance-request]]
==== Index Upgrade Info Request

An `IndexUpgradeInfoRequest` does not require any argument:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request]
--------------------------------------------------
<1> Create a new request instance

==== Optional arguments
The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices]
--------------------------------------------------
<1> Set the indices to the request

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices-options]
--------------------------------------------------
<1> Set the `IndicesOptions` to control how unavailable indices are resolved and
how wildcard expressions are expanded

[[java-rest-high-migration-get-assistance-execution]]
==== Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-execute]
--------------------------------------------------

[[java-rest-high-migration-get-assistance-response]]
==== Response

The returned `IndexUpgradeInfoResponse` contains the actions required for each index.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-response]
--------------------------------------------------
<1> Retrieve the index
<2> Retrieve the action required for the migration of the current index
@@ -198,6 +198,14 @@ The Java High Level REST Client supports the following Licensing APIs:
include::licensing/put-license.asciidoc[]
include::licensing/get-license.asciidoc[]

== Migration APIs

The Java High Level REST Client supports the following Migration APIs:

* <<java-rest-high-migration-get-assistance>>

include::migration/get-assistance.asciidoc[]

== Watcher APIs

The Java High Level REST Client supports the following Watcher APIs:
@@ -12,7 +12,7 @@ optional as part of a full metric aggregation.
`params` (`Map`, read-only)::
User-defined parameters passed in as part of the query.

`params['_agg']` (`Map`)::
`state` (`Map`)::
`Map` with values available from the prior map script.

*Return*

@@ -12,13 +12,13 @@ full metric aggregation.
`params` (`Map`, read-only)::
User-defined parameters passed in as part of the query.

`params['_agg']` (`Map`)::
`state` (`Map`)::
Empty `Map` used to add values for use in a
<<painless-metric-agg-map-context, map script>>.

*Side Effects*

`params['_agg']` (`Map`)::
`state` (`Map`)::
Add values to this `Map` to for use in a map. Additional values must
be of the type `Map`, `List`, `String` or primitive.

@@ -13,10 +13,9 @@ part of a full metric aggregation.
`params` (`Map`, read-only)::
User-defined parameters passed in as part of the query.

`params['_agg']` (`Map`)::
`state` (`Map`)::
`Map` used to add values for processing in a
<<painless-metric-agg-map-context, combine script>> or returned
directly.
<<painless-metric-agg-map-context, combine script>> or to be returned from the aggregation.

`doc` (`Map`, read-only)::
Contains the fields of the current document where each field is a

@@ -27,15 +26,16 @@ part of a full metric aggregation.

*Side Effects*

`params['_agg']` (`Map`)::
`state` (`Map`)::
Use this `Map` to add values for processing in a combine script.
Additional values must be of the type `Map`, `List`, `String` or
primitive. If an initialization script is provided as part the
primitive. The same `state` `Map` is shared between all aggregated documents
on a given shard. If an initialization script is provided as part of the
aggregation then values added from the initialization script are
available as well. If no combine script is specified, values must be
directly stored in `_agg`. If no combine script and no
available. If no combine script is specified, values must be
directly stored in `state` in a usable form. If no combine script and no
<<painless-metric-agg-reduce-context, reduce script>> are specified, the
values are used as the result.
`state` values are used as the result.

*Return*

@@ -14,7 +14,7 @@ specified) and is optional as part of a full metric aggregation.
`params` (`Map`, read-only)::
User-defined parameters passed in as part of the query.

`params['_aggs']` (`Map`)::
`states` (`Map`)::
`Map` with values available from the prior combine script (or a map
script if no combine script is specified).
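These renamed variables (`state` replacing `params['_agg']`, `states` replacing `params['_aggs']`) are what scripted metric aggregation scripts now see. A hedged sketch of a request that exercises them, written with the Java client; the index name, the `amount` field, and the summing logic are invented for illustration:

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class ScriptedMetricStateSketch {
    // Builds a scripted_metric aggregation that sums an invented "amount" field:
    // the init, map, and combine scripts share the per-shard `state` Map.
    static SearchRequest totalAmountRequest() {
        return new SearchRequest("sales").source(new SearchSourceBuilder().size(0).aggregation(
            AggregationBuilders.scriptedMetric("total_amount")
                .initScript(new Script("state.transactions = []"))
                .mapScript(new Script("state.transactions.add(doc['amount'].value)"))
                // each shard's combine result is later exposed to an optional reduce script as `states`
                .combineScript(new Script("double sum = 0; for (t in state.transactions) { sum += t } return sum"))));
    }
}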
@@ -92,8 +92,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
final List<HttpHost> hosts,
final Version esVersion,
final Version masterVersion) {
return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion,
restClientBuilder -> configureClient(restClientBuilder, restClientSettings()));
return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, this::getClientBuilderWithSniffedHosts);
}

/**
@@ -28,7 +28,7 @@ import java.util.Objects;
* constructors, methods, and fields that can be used within a Painless script at both compile-time
* and run-time.
*
* A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each
* A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each
* {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and
* {@link WhitelistField}s which are what will be available with a Painless script. See each individual
* whitelist object for more detail.

@@ -56,14 +56,14 @@ public final class Whitelist {
Collections.singletonList(WhitelistLoader.loadFromResourceFiles(Whitelist.class, BASE_WHITELIST_FILES));

/** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */
public final ClassLoader javaClassLoader;
public final ClassLoader classLoader;

/** The {@link List} of all the whitelisted Painless classes. */
public final List<WhitelistClass> whitelistStructs;
public final List<WhitelistClass> whitelistClasses;

/** Standard constructor. All values must be not {@code null}. */
public Whitelist(ClassLoader javaClassLoader, List<WhitelistClass> whitelistStructs) {
this.javaClassLoader = Objects.requireNonNull(javaClassLoader);
this.whitelistStructs = Collections.unmodifiableList(Objects.requireNonNull(whitelistStructs));
public Whitelist(ClassLoader classLoader, List<WhitelistClass> whitelistClasses) {
this.classLoader = Objects.requireNonNull(classLoader);
this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses));
}
}
@@ -30,7 +30,7 @@ import java.util.Objects;
* specific context, as long as multiple classes representing the same Java class have the same
* class name and have legal constructor/method overloading they can be merged together.
*
* Classes in Painless allow for arity overloading for constructors and methods. Arity overloading
* Classes in Painless allow for arity overloading for constructors and methods. Arity overloading
* means that multiple constructors are allowed for a single class as long as they have a different
* number of parameters, and multiples methods with the same name are allowed for a single class
* as long as they have the same return type and a different number of parameters.

@@ -40,7 +40,7 @@ import java.util.Objects;
*/
public final class WhitelistClass {

/** Information about where this class was white-listed from. Can be used for error messages. */
/** Information about where this class was white-listed from. */
public final String origin;

/** The Java class name this class represents. */

@@ -49,7 +49,7 @@ public final class WhitelistClass {
/**
* Allow the Java class name to only be specified as the fully-qualified name.
*/
public final boolean onlyFQNJavaClassName;
public final boolean noImport;

/** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */
public final List<WhitelistConstructor> whitelistConstructors;

@@ -61,13 +61,14 @@ public final class WhitelistClass {
public final List<WhitelistField> whitelistFields;

/** Standard constructor. All values must be not {@code null}. */
public WhitelistClass(String origin, String javaClassName, boolean onlyFQNJavaClassName,
public WhitelistClass(String origin, String javaClassName, boolean noImport,
List<WhitelistConstructor> whitelistConstructors,
List<WhitelistMethod> whitelistMethods,
List<WhitelistField> whitelistFields) {

this.origin = Objects.requireNonNull(origin);
this.javaClassName = Objects.requireNonNull(javaClassName);
this.onlyFQNJavaClassName = onlyFQNJavaClassName;
this.noImport = noImport;

this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors));
this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods));
@@ -25,24 +25,24 @@ import java.util.Objects;

/**
* Constructor represents the equivalent of a Java constructor available as a whitelisted class
* constructor within Painless. Constructors for Painless classes may be accessed exactly as
* constructors for Java classes are using the 'new' keyword. Painless classes may have multiple
* constructor within Painless. Constructors for Painless classes may be accessed exactly as
* constructors for Java classes are using the 'new' keyword. Painless classes may have multiple
* constructors as long as they comply with arity overloading described for {@link WhitelistClass}.
*/
public final class WhitelistConstructor {

/** Information about where this constructor was whitelisted from. Can be used for error messages. */
/** Information about where this constructor was whitelisted from. */
public final String origin;

/**
* A {@link List} of {@link String}s that are the Painless type names for the parameters of the
* constructor which can be used to look up the Java constructor through reflection.
*/
public final List<String> painlessParameterTypeNames;
public final List<String> canonicalTypeNameParameters;

/** Standard constructor. All values must be not {@code null}. */
public WhitelistConstructor(String origin, List<String> painlessParameterTypeNames) {
public WhitelistConstructor(String origin, List<String> canonicalTypeNameParameters) {
this.origin = Objects.requireNonNull(origin);
this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames));
this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters));
}
}
@@ -23,24 +23,24 @@ import java.util.Objects;

/**
* Field represents the equivalent of a Java field available as a whitelisted class field
* within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes
* within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes
* are using the '.' operator on an existing class variable/field.
*/
public class WhitelistField {

/** Information about where this method was whitelisted from. Can be used for error messages. */
/** Information about where this method was whitelisted from. */
public final String origin;

/** The Java field name used to look up the Java field through reflection. */
public final String javaFieldName;
/** The field name used to look up the field reflection object. */
public final String fieldName;

/** The Painless type name for the field which can be used to look up the Java field through reflection. */
public final String painlessFieldTypeName;
/** The canonical type name for the field which can be used to look up the Java field through reflection. */
public final String canonicalTypeNameParameter;

/** Standard constructor. All values must be not {@code null}. */
public WhitelistField(String origin, String javaFieldName, String painlessFieldTypeName) {
public WhitelistField(String origin, String fieldName, String canonicalTypeNameParameter) {
this.origin = Objects.requireNonNull(origin);
this.javaFieldName = Objects.requireNonNull(javaFieldName);
this.painlessFieldTypeName = Objects.requireNonNull(painlessFieldTypeName);
this.fieldName = Objects.requireNonNull(fieldName);
this.canonicalTypeNameParameter = Objects.requireNonNull(canonicalTypeNameParameter);
}
}
@@ -35,14 +35,14 @@ import java.util.List;
public final class WhitelistLoader {

/**
* Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of
* Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of
* {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String}
* is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java
* is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java
* reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field}
* specified as part of the whitelist in the text file.
*
* A single pass is made through each file to collect all the information about each class, constructor, method,
* and field. Most validation will be done at a later point after all whitelists have been gathered and their
* and field. Most validation will be done at a later point after all whitelists have been gathered and their
* merging takes place.
*
* A painless type name is one of the following:

@@ -52,7 +52,7 @@ public final class WhitelistLoader {
* <li> fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as
* a Painless type name with the exception that any dollar symbols used as part of inner classes will
* be replaced with dot symbols. </li>
* <li> short Java type name - The text after the final dot symbol of any specified Java class. A
* <li> short Java type name - The text after the final dot symbol of any specified Java class. A
* short type Java name may be excluded by using the 'only_fqn' token during Painless class parsing
* as described later. </li>
* </ul>

@@ -60,7 +60,7 @@ public final class WhitelistLoader {
* The following can be parsed from each whitelist text file:
* <ul>
* <li> Blank lines will be ignored by the parser. </li>
* <li> Comments may be created starting with a pound '#' symbol and end with a newline. These will
* <li> Comments may be created starting with a pound '#' symbol and end with a newline. These will
* be ignored by the parser. </li>
* <li> Primitive types may be specified starting with 'class' and followed by the Java type name,
* an opening bracket, a newline, a closing bracket, and a final newline. </li>

@@ -93,10 +93,10 @@ public final class WhitelistLoader {
*
* Note there must be a one-to-one correspondence of Painless type names to Java type/class names.
* If the same Painless type is defined across multiple files and the Java class is the same, all
* specified constructors, methods, and fields will be merged into a single Painless type. The
* specified constructors, methods, and fields will be merged into a single Painless type. The
* Painless dynamic type, 'def', used as part of constructor, method, and field definitions will
* be appropriately parsed and handled. Painless complex types must be specified with the
* fully-qualified Java class name. Method argument types, method return types, and field types
* be appropriately parsed and handled. Painless complex types must be specified with the
* fully-qualified Java class name. Method argument types, method return types, and field types
* must be specified with Painless type names (def, fully-qualified, or short) as described earlier.
*
* The following example is used to create a single whitelist text file:

@@ -132,7 +132,7 @@ public final class WhitelistLoader {
* }
*/
public static Whitelist loadFromResourceFiles(Class<?> resource, String... filepaths) {
List<WhitelistClass> whitelistStructs = new ArrayList<>();
List<WhitelistClass> whitelistClasses = new ArrayList<>();

// Execute a single pass through the whitelist text files. This will gather all the
// constructors, methods, augmented methods, and fields for each whitelisted class.

@@ -143,7 +143,7 @@ public final class WhitelistLoader {
try (LineNumberReader reader = new LineNumberReader(
new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) {

String whitelistStructOrigin = null;
String whitelistClassOrigin = null;
String javaClassName = null;
boolean onlyFQNJavaClassName = false;
List<WhitelistConstructor> whitelistConstructors = null;

@@ -178,7 +178,7 @@ public final class WhitelistLoader {
throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]");
}

whitelistStructOrigin = "[" + filepath + "]:[" + number + "]";
whitelistClassOrigin = "[" + filepath + "]:[" + number + "]";
javaClassName = tokens[0];

// Reset all the constructors, methods, and fields to support a new class.

@@ -194,11 +194,11 @@ public final class WhitelistLoader {
throw new IllegalArgumentException("invalid class definition: extraneous closing bracket");
}

whitelistStructs.add(new WhitelistClass(whitelistStructOrigin, javaClassName, onlyFQNJavaClassName,
whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, onlyFQNJavaClassName,
whitelistConstructors, whitelistMethods, whitelistFields));

// Set all the variables to null to ensure a new class definition is found before other parsable values.
whitelistStructOrigin = null;
whitelistClassOrigin = null;
javaClassName = null;
onlyFQNJavaClassName = false;
whitelistConstructors = null;

@@ -300,7 +300,7 @@ public final class WhitelistLoader {
}
ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>)resource::getClassLoader);

return new Whitelist(loader, whitelistStructs);
return new Whitelist(loader, whitelistClasses);
}

private WhitelistLoader() {}
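As a hedged illustration of the loader's public entry point (the calling class and resource file name are invented, and the import paths assume the Painless spi package these files live in):

import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;

public class CustomWhitelistSketch {
    // Loads an additional whitelist text file, in the format described by the javadoc above,
    // from this class's classpath; the class also supplies the ClassLoader used for reflection lookups.
    static Whitelist load() {
        return WhitelistLoader.loadFromResourceFiles(CustomWhitelistSketch.class, "org.example.my.whitelist.txt");
    }
}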
@@ -25,52 +25,53 @@ import java.util.Objects;

/**
* Method represents the equivalent of a Java method available as a whitelisted class method
* within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes
* are using the '.' operator on an existing class variable/field. Painless classes may have multiple
* methods with the same name as long as they comply with arity overloading described for {@link WhitelistMethod}.
* within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes
* are using the '.' operator on an existing class variable/field. Painless classes may have multiple
* methods with the same name as long as they comply with arity overloading described in
* {@link WhitelistClass}.
*
* Classes may also have additional methods that are not part of the Java class the class represents -
* these are known as augmented methods. An augmented method can be added to a class as a part of any
* these are known as augmented methods. An augmented method can be added to a class as a part of any
* Java class as long as the method is static and the first parameter of the method is the Java class
* represented by the class. Note that the augmented method's parent Java class does not need to be
* represented by the class. Note that the augmented method's parent Java class does not need to be
* whitelisted.
*/
public class WhitelistMethod {

/** Information about where this method was whitelisted from. Can be used for error messages. */
/** Information about where this method was whitelisted from. */
public final String origin;

/**
* The Java class name for the owner of an augmented method. If the method is not augmented
* The class name for the owner of an augmented method. If the method is not augmented
* this should be {@code null}.
*/
public final String javaAugmentedClassName;
public final String augmentedCanonicalClassName;

/** The Java method name used to look up the Java method through reflection. */
public final String javaMethodName;
/** The method name used to look up the method reflection object. */
public final String methodName;

/**
* The Painless type name for the return type of the method which can be used to look up the Java
* method through reflection.
* The canonical type name for the return type.
*/
public final String painlessReturnTypeName;
public final String returnCanonicalTypeName;

/**
* A {@link List} of {@link String}s that are the Painless type names for the parameters of the
* method which can be used to look up the Java method through reflection.
* A {@link List} of {@link String}s that are the canonical type names for the parameters of the
* method used to look up the method reflection object.
*/
public final List<String> painlessParameterTypeNames;
public final List<String> canonicalTypeNameParameters;

/**
* Standard constructor. All values must be not {@code null} with the exception of jAugmentedClass;
* jAugmentedClass will be {@code null} unless the method is augmented as described in the class documentation.
* Standard constructor. All values must be not {@code null} with the exception of
* augmentedCanonicalClassName; augmentedCanonicalClassName will be {@code null} unless the method
* is augmented as described in the class documentation.
*/
public WhitelistMethod(String origin, String javaAugmentedClassName, String javaMethodName,
String painlessReturnTypeName, List<String> painlessParameterTypeNames) {
public WhitelistMethod(String origin, String augmentedCanonicalClassName, String methodName,
String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
this.origin = Objects.requireNonNull(origin);
this.javaAugmentedClassName = javaAugmentedClassName;
this.javaMethodName = javaMethodName;
this.painlessReturnTypeName = Objects.requireNonNull(painlessReturnTypeName);
this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames));
this.augmentedCanonicalClassName = augmentedCanonicalClassName;
this.methodName = methodName;
this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName);
this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters));
}
}
@@ -50,10 +50,10 @@ public final class PainlessLookup {
return canonicalClassNamesToClasses.containsKey(canonicalClassName);
}

public Class<?> canonicalTypeNameToType(String painlessType) {
Objects.requireNonNull(painlessType);
public Class<?> canonicalTypeNameToType(String canonicalTypeName) {
Objects.requireNonNull(canonicalTypeName);

return PainlessLookupUtility.canonicalTypeNameToType(painlessType, canonicalClassNamesToClasses);
return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses);
}

public Set<Class<?>> getClasses() {

@@ -64,10 +64,10 @@ public final class PainlessLookup {
return classesToPainlessClasses.get(targetClass);
}

public PainlessConstructor lookupPainlessConstructor(String targetClassName, int constructorArity) {
Objects.requireNonNull(targetClassName);
public PainlessConstructor lookupPainlessConstructor(String targetCanonicalClassName, int constructorArity) {
Objects.requireNonNull(targetCanonicalClassName);

Class<?> targetClass = canonicalTypeNameToType(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);

if (targetClass == null) {
return null;

@@ -95,10 +95,10 @@ public final class PainlessLookup {
return painlessConstructor;
}

public PainlessMethod lookupPainlessMethod(String targetClassName, boolean isStatic, String methodName, int methodArity) {
Objects.requireNonNull(targetClassName);
public PainlessMethod lookupPainlessMethod(String targetCanonicalClassName, boolean isStatic, String methodName, int methodArity) {
Objects.requireNonNull(targetCanonicalClassName);

Class<?> targetClass = canonicalTypeNameToType(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);

if (targetClass == null) {
return null;

@@ -127,10 +127,10 @@ public final class PainlessLookup {
targetPainlessClass.methods.get(painlessMethodKey);
}

public PainlessField lookupPainlessField(String targetClassName, boolean isStatic, String fieldName) {
Objects.requireNonNull(targetClassName);
public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) {
Objects.requireNonNull(targetCanonicalClassName);

Class<?> targetClass = canonicalTypeNameToType(targetClassName);
Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName);

if (targetClass == null) {
return null;

@@ -199,9 +199,7 @@ public final class PainlessLookup {
return lookupRuntimePainlessObject(originalTargetClass, objectLookup);
}

private <T> T lookupRuntimePainlessObject(
Class<?> originalTargetClass, Function<PainlessClass, T> objectLookup) {

private <T> T lookupRuntimePainlessObject(Class<?> originalTargetClass, Function<PainlessClass, T> objectLookup) {
Class<?> currentTargetClass = originalTargetClass;

while (currentTargetClass != null) {
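A small sketch of the renamed lookup methods in use; the lookup instance and the resolved method are illustrative, and the import paths assume the Painless lookup package:

import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessMethod;

public class LookupSketch {
    // Resolves String.substring(int, int): the target class is addressed by its canonical type name,
    // the method by name plus arity; a null return means the method is not whitelisted.
    static PainlessMethod substring(PainlessLookup painlessLookup) {
        return painlessLookup.lookupPainlessMethod("java.lang.String", false, "substring", 2);
    }
}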
@ -166,35 +166,35 @@ public final class PainlessLookupBuilder {

try {
for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistClass : whitelist.whitelistStructs) {
for (WhitelistClass whitelistClass : whitelist.whitelistClasses) {
origin = whitelistClass.origin;
painlessLookupBuilder.addPainlessClass(
whitelist.javaClassLoader, whitelistClass.javaClassName, whitelistClass.onlyFQNJavaClassName == false);
whitelist.classLoader, whitelistClass.javaClassName, whitelistClass.noImport == false);
}
}

for (Whitelist whitelist : whitelists) {
for (WhitelistClass whitelistClass : whitelist.whitelistStructs) {
for (WhitelistClass whitelistClass : whitelist.whitelistClasses) {
String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.');

for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) {
origin = whitelistConstructor.origin;
painlessLookupBuilder.addPainlessConstructor(
targetCanonicalClassName, whitelistConstructor.painlessParameterTypeNames);
targetCanonicalClassName, whitelistConstructor.canonicalTypeNameParameters);
}

for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) {
origin = whitelistMethod.origin;
painlessLookupBuilder.addPainlessMethod(
whitelist.javaClassLoader, targetCanonicalClassName, whitelistMethod.javaAugmentedClassName,
whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName,
whitelistMethod.painlessParameterTypeNames);
whitelist.classLoader, targetCanonicalClassName, whitelistMethod.augmentedCanonicalClassName,
whitelistMethod.methodName, whitelistMethod.returnCanonicalTypeName,
whitelistMethod.canonicalTypeNameParameters);
}

for (WhitelistField whitelistField : whitelistClass.whitelistFields) {
origin = whitelistField.origin;
painlessLookupBuilder.addPainlessField(
targetCanonicalClassName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName);
targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter);
}
}
}

@ -315,25 +315,25 @@ public final class PainlessLookupBuilder {
}
}

public void addPainlessConstructor(String targetCanonicalClassName, List<String> typeNameParameters) {
public void addPainlessConstructor(String targetCanonicalClassName, List<String> canonicalTypeNameParameters) {
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(typeNameParameters);
Objects.requireNonNull(canonicalTypeNameParameters);

Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);

if (targetClass == null) {
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" +
"for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]");
"for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]");
}

List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());

for (String typeNameParameter : typeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);

if (typeParameter == null) {
throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
"for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]");
throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " +
"for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]");
}

typeParameters.add(typeParameter);

@ -409,19 +409,19 @@ public final class PainlessLookupBuilder {
}

public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName,
String methodName, String returnCanonicalTypeName, List<String> typeNameParameters) {
String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {

Objects.requireNonNull(classLoader);
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnCanonicalTypeName);
Objects.requireNonNull(typeNameParameters);
Objects.requireNonNull(canonicalTypeNameParameters);

Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);

if (targetClass == null) {
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
}

Class<?> augmentedClass = null;

@ -431,18 +431,18 @@ public final class PainlessLookupBuilder {
augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader);
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe);
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]", cnfe);
}
}

List<Class<?>> typeParameters = new ArrayList<>(typeNameParameters.size());
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());

for (String typeNameParameter : typeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);

if (typeParameter == null) {
throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
throw new IllegalArgumentException("parameter type [" + canonicalTypeNameParameter + "] not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
}

typeParameters.add(typeParameter);

@ -452,7 +452,7 @@ public final class PainlessLookupBuilder {

if (returnType == null) {
throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]");
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
}

addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters);

@ -607,10 +607,10 @@ public final class PainlessLookupBuilder {
}
}

public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) {
public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) {
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(fieldName);
Objects.requireNonNull(typeNameParameter);
Objects.requireNonNull(canonicalTypeNameParameter);

Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);

@ -618,10 +618,10 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
}

Class<?> typeParameter = canonicalTypeNameToType(typeNameParameter);
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);

if (typeParameter == null) {
throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " +
throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " +
"for field [[" + targetCanonicalClassName + "], [" + fieldName + "]");
}
@ -39,6 +39,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;

@ -380,7 +381,8 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted());

table.addCell(indexMetaData.getCreationDate());
table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC));
ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC);
table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime));

table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
@ -284,6 +284,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
assertSameIndices(updateRequest, updateShardActions);
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32808")
public void testBulk() {
String[] bulkShardActions = new String[]{BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]"};
interceptTransportActions(bulkShardActions);
@ -28,7 +28,7 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;

import java.io.IOException;

@ -50,8 +50,8 @@ public final class ClientYamlDocsTestClient extends ClientYamlTestClient {
final List<HttpHost> hosts,
final Version esVersion,
final Version masterVersion,
final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer) {
super(restSpec, restClient, hosts, esVersion, masterVersion, clientBuilderConsumer);
final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes) {
super(restSpec, restClient, hosts, esVersion, masterVersion, clientBuilderWithSniffedNodes);
}

@Override
@ -26,7 +26,6 @@ import org.apache.http.entity.ContentType;
import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.NodeSelector;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;

@ -34,7 +33,7 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath;

@ -66,7 +65,7 @@ public class ClientYamlTestClient implements Closeable {
private final Map<NodeSelector, RestClient> restClients = new HashMap<>();
private final Version esVersion;
private final Version masterVersion;
private final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer;
private final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes;

ClientYamlTestClient(
final ClientYamlSuiteRestSpec restSpec,

@ -74,13 +73,13 @@ public class ClientYamlTestClient implements Closeable {
final List<HttpHost> hosts,
final Version esVersion,
final Version masterVersion,
final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer) {
final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes) {
assert hosts.size() > 0;
this.restSpec = restSpec;
this.restClients.put(NodeSelector.ANY, restClient);
this.esVersion = esVersion;
this.masterVersion = masterVersion;
this.clientBuilderConsumer = clientBuilderConsumer;
this.clientBuilderWithSniffedNodes = clientBuilderWithSniffedNodes;
}

public Version getEsVersion() {

@ -199,10 +198,9 @@ public class ClientYamlTestClient implements Closeable {
protected RestClient getRestClient(NodeSelector nodeSelector) {
//lazily build a new client in case we need to point to some specific node
return restClients.computeIfAbsent(nodeSelector, selector -> {
RestClient anyClient = restClients.get(NodeSelector.ANY);
RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0]));
RestClientBuilder builder;
try {
clientBuilderConsumer.accept(builder);
builder = clientBuilderWithSniffedNodes.get();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@ -26,6 +26,7 @@ import org.elasticsearch.client.Node;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;

@ -58,13 +59,6 @@ import java.util.Set;
/**
* Runs a suite of yaml tests shared with all the official Elasticsearch
* clients against against an elasticsearch cluster.
* <p>
* <strong>IMPORTANT</strong>: These tests sniff the cluster for metadata
* and hosts on startup and replace the list of hosts that they are
* configured to use with the list sniffed from the cluster. So you can't
* control which nodes receive the request by providing the right list of
* nodes in the <code>tests.rest.cluster</code> system property. Instead
* the tests must explictly use `node_selector`s.
*/
public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {

@ -123,11 +117,6 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
@Before
public void initAndResetContext() throws Exception {
if (restTestExecutionContext == null) {
// Sniff host metadata in case we need it in the yaml tests
List<Node> nodesWithMetadata = sniffHostMetadata();
client().setNodes(nodesWithMetadata);
adminClient().setNodes(nodesWithMetadata);

assert adminExecutionContext == null;
assert blacklistPathMatchers == null;
final ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH);

@ -166,8 +155,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
final List<HttpHost> hosts,
final Version esVersion,
final Version masterVersion) {
return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion,
restClientBuilder -> configureClient(restClientBuilder, restClientSettings()));
return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion, this::getClientBuilderWithSniffedHosts);
}

@AfterClass

@ -408,13 +396,16 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
}

/**
* Sniff the cluster for host metadata.
* Sniff the cluster for host metadata and return a
* {@link RestClientBuilder} for a client with that metadata.
*/
private List<Node> sniffHostMetadata() throws IOException {
protected final RestClientBuilder getClientBuilderWithSniffedHosts() throws IOException {
ElasticsearchNodesSniffer.Scheme scheme =
ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT));
ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer(
adminClient(), ElasticsearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme);
return sniffer.sniff();
RestClientBuilder builder = RestClient.builder(sniffer.sniff().toArray(new Node[0]));
configureClient(builder, restClientSettings());
return builder;
}
}
@ -391,7 +391,32 @@ public class DoSection implements ExecutableSection {
if (token == XContentParser.Token.FIELD_NAME) {
key = parser.currentName();
} else if (token.isValue()) {
NodeSelector newSelector = new HasAttributeNodeSelector(key, parser.text());
/*
* HasAttributeNodeSelector selects nodes that do not have
* attribute metadata set so it can be used against nodes that
* have not yet been sniffed. In these tests we expect the node
* metadata to be explicitly sniffed if we need it and we'd
* like to hard fail if it is not so we wrap the selector so we
* can assert that the data is sniffed.
*/
NodeSelector delegate = new HasAttributeNodeSelector(key, parser.text());
NodeSelector newSelector = new NodeSelector() {
@Override
public void select(Iterable<Node> nodes) {
for (Node node : nodes) {
if (node.getAttributes() == null) {
throw new IllegalStateException("expected [attributes] metadata to be set but got "
+ node);
}
}
delegate.select(nodes);
}

@Override
public String toString() {
return delegate.toString();
}
};
result = result == NodeSelector.ANY ?
newSelector : new ComposeNodeSelector(result, newSelector);
} else {
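A small usage sketch (not part of this commit) of a validating NodeSelector like the one added above; it assumes the low-level REST client classes (org.elasticsearch.client.Node, NodeSelector) and Apache HttpHost are on the classpath:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Node;
import org.elasticsearch.client.NodeSelector;

import java.util.ArrayList;
import java.util.List;

public class NodeSelectorSketch {
    public static void main(String[] args) {
        // Fail hard when node attributes were never sniffed, mirroring the wrapper in the diff.
        NodeSelector requireAttributes = nodes -> {
            for (Node node : nodes) {
                if (node.getAttributes() == null) {
                    throw new IllegalStateException("expected [attributes] metadata to be set but got " + node);
                }
            }
        };
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node(new HttpHost("localhost", 9200)));
        // Throws IllegalStateException because the node above carries no sniffed attributes.
        requireAttributes.select(nodes);
    }
}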
@ -540,6 +540,15 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase
doSection.execute(context);
verify(context).callApi("indices.get_field_mapping", singletonMap("index", "test_index"),
emptyList(), emptyMap(), doSection.getApiCallSection().getNodeSelector());

{
List<Node> badNodes = new ArrayList<>();
badNodes.add(new Node(new HttpHost("dummy")));
Exception e = expectThrows(IllegalStateException.class, () ->
doSection.getApiCallSection().getNodeSelector().select(badNodes));
assertEquals("expected [version] metadata to be set but got [host=http://dummy]",
e.getMessage());
}
}

private static Node nodeWithVersion(String version) {

@ -568,6 +577,14 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase
doSection.getApiCallSection().getNodeSelector().select(nodes);
assertEquals(Arrays.asList(hasAttr), nodes);
}
{
List<Node> badNodes = new ArrayList<>();
badNodes.add(new Node(new HttpHost("dummy")));
Exception e = expectThrows(IllegalStateException.class, () ->
doSection.getApiCallSection().getNodeSelector().select(badNodes));
assertEquals("expected [attributes] metadata to be set but got [host=http://dummy]",
e.getMessage());
}

parser = createParser(YamlXContent.yamlXContent,
"node_selector:\n" +
@ -1,11 +1,11 @@
[role="xpack"]
[[ml-configuring-detector-custom-rules]]
=== Customizing detectors with rules and filters
=== Customizing detectors with custom rules

<<ml-rules,Rules and filters>> enable you to change the behavior of anomaly
<<ml-rules,Custom rules>> enable you to change the behavior of anomaly
detectors based on domain-specific knowledge.

Rules describe _when_ a detector should take a certain _action_ instead
Custom rules describe _when_ a detector should take a certain _action_ instead
of following its default behavior. To specify the _when_ a rule uses
a `scope` and `conditions`. You can think of `scope` as the categorical
specification of a rule, while `conditions` are the numerical part.

@ -14,7 +14,7 @@ scope and conditions.

Let us see how those can be configured by examples.

==== Specifying rule scope
==== Specifying custom rule scope

Let us assume we are configuring a job in order to detect DNS data exfiltration.
Our data contain fields "subdomain" and "highest_registered_domain".

@ -127,7 +127,7 @@ PUT _xpack/ml/anomaly_detectors/scoping_multiple_fields
Such a detector will skip results when the values of all 3 scoped fields
are included in the referenced filters.

==== Specifying rule conditions
==== Specifying custom rule conditions

Imagine a detector that looks for anomalies in CPU utilization.
Given a machine that is idle for long enough, small movement in CPU could

@ -206,9 +206,9 @@ PUT _xpack/ml/anomaly_detectors/rule_with_range
----------------------------------
// CONSOLE

==== Rules in the life-cycle of a job
==== Custom rules in the life-cycle of a job

Rules only affect results created after the rules were applied.
Custom rules only affect results created after the rules were applied.
Let us imagine that we have configured a job and it has been running
for some time. After observing its results we decide that we can employ
rules in order to get rid of some uninteresting results. We can use

@ -216,7 +216,7 @@ the {ref}/ml-update-job.html[update job API] to do so. However, the rule we
added will only be in effect for any results created from the moment we added
the rule onwards. Past results will remain unaffected.

==== Using rules VS filtering data
==== Using custom rules VS filtering data

It might appear like using rules is just another way of filtering the data
that feeds into a job. For example, a rule that skips results when the
@ -13,4 +13,4 @@ A filter resource has the following properties:
`items`::
(array of strings) An array of strings which is the filter item list.

For more information, see {stack-ov}/ml-rules.html[Machine learning rules and filters].
For more information, see {stack-ov}/ml-rules.html[Machine learning custom rules].
@ -18,7 +18,7 @@ Retrieves filters.
===== Description

You can get a single filter or all filters. For more information, see
{stack-ov}/ml-rules.html[Machine learning rules and filters].
{stack-ov}/ml-rules.html[Machine learning custom rules].

==== Path Parameters
@ -265,7 +265,7 @@ NOTE: The `field_name` cannot contain double quotes or backslashes.
when there is no value for the by or partition fields. The default value is `false`.

`custom_rules`::
(array) An array of rule objects, which enable customizing how the detector works.
(array) An array of custom rule objects, which enable customizing how the detector works.
For example, a rule may dictate to the detector conditions under which results should be skipped.
For more information see <<ml-detector-custom-rule,detector custom rule objects>>. +
+

@ -420,7 +420,7 @@ For more information, see
{stack-ov}/ml-rules.html[Custom rules] enable you to customize the way detectors
operate.

A rule has the following properties:
A custom rule has the following properties:

`actions`::
(array) The set of actions to be triggered when the rule applies.
@ -58,8 +58,7 @@ public class XDocsClientYamlTestSuiteIT extends XPackRestIT {
final List<HttpHost> hosts,
final Version esVersion,
final Version masterVersion) {
return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion,
restClientBuilder -> configureClient(restClientBuilder, restClientSettings()));
return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, this::getClientBuilderWithSniffedHosts);
}

/**
@ -39,6 +39,10 @@ dependencies {
// security deps
shadow 'com.unboundid:unboundid-ldapsdk:3.2.0'
shadow project(path: ':modules:transport-netty4', configuration: 'runtime')
shadow(project(path: ':plugins:transport-nio', configuration: 'runtime')) {
// TODO: core exclusion should not be necessary, since it is a transitive dep of all plugins
exclude group: "org.elasticsearch", module: "elasticsearch-core"
}

testCompile 'org.elasticsearch:securemock:1.2'
testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
@ -6,27 +6,13 @@
package org.elasticsearch.xpack.core.upgrade.actions;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;

public class IndexUpgradeInfoAction extends Action<IndexUpgradeInfoAction.Response> {
public class IndexUpgradeInfoAction extends Action<IndexUpgradeInfoResponse> {

public static final IndexUpgradeInfoAction INSTANCE = new IndexUpgradeInfoAction();
public static final String NAME = "cluster:admin/xpack/upgrade/info";

@ -36,149 +22,15 @@ public class IndexUpgradeInfoAction extends Action<IndexUpgradeInfoAction.Respon
}

@Override
public Response newResponse() {
return new Response();
public IndexUpgradeInfoResponse newResponse() {
return new IndexUpgradeInfoResponse();
}

public static class Response extends ActionResponse implements ToXContentObject {
private Map<String, UpgradeActionRequired> actions;

public Response() {

}

public Response(Map<String, UpgradeActionRequired> actions) {
this.actions = actions;
}

@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
}

public Map<String, UpgradeActionRequired> getActions() {
return actions;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.startObject("indices");
for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
builder.startObject(entry.getKey());
{
builder.field("action_required", entry.getValue().toString());
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
return builder;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return Objects.equals(actions, response.actions);
}

@Override
public int hashCode() {
return Objects.hash(actions);
}
}

public static class Request extends MasterNodeReadRequest<Request> implements IndicesRequest.Replaceable {

private String[] indices = null;
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);

// for serialization
public Request() {

}

public Request(String... indices) {
this.indices = indices;
}

public Request(StreamInput in) throws IOException {
super(in);
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
}

@Override
public String[] indices() {
return indices;
}

@Override
public Request indices(String... indices) {
this.indices = indices;
return this;
}

@Override
public IndicesOptions indicesOptions() {
return indicesOptions;
}

public void indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
}

@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (indices == null) {
validationException = addValidationError("index/indices is missing", validationException);
}
return validationException;
}

@Override
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Arrays.equals(indices, request.indices) &&
Objects.equals(indicesOptions.toString(), request.indicesOptions.toString());
}

@Override
public int hashCode() {
return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString());
}
}

public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder<Request, Response, RequestBuilder> {
public static class RequestBuilder
extends MasterNodeReadOperationRequestBuilder<IndexUpgradeInfoRequest, IndexUpgradeInfoResponse, RequestBuilder> {

public RequestBuilder(ElasticsearchClient client) {
super(client, INSTANCE, new Request());
super(client, INSTANCE, new IndexUpgradeInfoRequest());
}

public RequestBuilder setIndices(String... indices) {

@ -191,5 +43,4 @@ public class IndexUpgradeInfoAction extends Action<IndexUpgradeInfoAction.Respon
return this;
}
}

}
}
@ -18,12 +18,14 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.service.ClusterService;

@ -471,12 +473,25 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
protected void masterOperation(OpenJobAction.Request request, ClusterState state, ActionListener<OpenJobAction.Response> listener) {
OpenJobAction.JobParams jobParams = request.getJobParams();
if (licenseState.isMachineLearningAllowed()) {
// Step 5. Wait for job to be started and respond
ActionListener<PersistentTasksCustomMetaData.PersistentTask<OpenJobAction.JobParams>> finalListener =

// Step 6. Clear job finished time once the job is started and respond
ActionListener<OpenJobAction.Response> clearJobFinishTime = ActionListener.wrap(
response -> {
if (response.isAcknowledged()) {
clearJobFinishedTime(jobParams.getJobId(), listener);
} else {
listener.onResponse(response);
}
},
listener::onFailure
);

// Step 5. Wait for job to be started
ActionListener<PersistentTasksCustomMetaData.PersistentTask<OpenJobAction.JobParams>> waitForJobToStart =
new ActionListener<PersistentTasksCustomMetaData.PersistentTask<OpenJobAction.JobParams>>() {
@Override
public void onResponse(PersistentTasksCustomMetaData.PersistentTask<OpenJobAction.JobParams> task) {
waitForJobStarted(task.getId(), jobParams, listener);
waitForJobStarted(task.getId(), jobParams, clearJobFinishTime);
}

@Override

@ -492,7 +507,7 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
// Step 4. Start job task
ActionListener<PutJobAction.Response> establishedMemoryUpdateListener = ActionListener.wrap(
response -> persistentTasksService.sendStartRequest(MlTasks.jobTaskId(jobParams.getJobId()),
OpenJobAction.TASK_NAME, jobParams, finalListener),
OpenJobAction.TASK_NAME, jobParams, waitForJobToStart),
listener::onFailure
);

@ -574,6 +589,35 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
});
}

private void clearJobFinishedTime(String jobId, ActionListener<OpenJobAction.Response> listener) {
clusterService.submitStateUpdateTask("clearing-job-finish-time-for-" + jobId, new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
MlMetadata mlMetadata = MlMetadata.getMlMetadata(currentState);
MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(mlMetadata);
Job.Builder jobBuilder = new Job.Builder(mlMetadata.getJobs().get(jobId));
jobBuilder.setFinishedTime(null);

mlMetadataBuilder.putJob(jobBuilder.build(), true);
ClusterState.Builder builder = ClusterState.builder(currentState);
return builder.metaData(new MetaData.Builder(currentState.metaData())
.putCustom(MlMetadata.TYPE, mlMetadataBuilder.build()))
.build();
}

@Override
public void onFailure(String source, Exception e) {
logger.error("[" + jobId + "] Failed to clear finished_time; source [" + source + "]", e);
listener.onResponse(new OpenJobAction.Response(true));
}

@Override
public void clusterStateProcessed(String source, ClusterState oldState,
ClusterState newState) {
listener.onResponse(new OpenJobAction.Response(true));
}
});
}
private void cancelJobStart(PersistentTasksCustomMetaData.PersistentTask<OpenJobAction.JobParams> persistentTask, Exception exception,
ActionListener<OpenJobAction.Response> listener) {
persistentTasksService.sendRemoveRequest(persistentTask.getId(),
@ -46,6 +46,11 @@ class BucketInfluencerNormalizable extends AbstractLeafNormalizable {
return bucketInfluencer.getInfluencerFieldName();
}

@Override
public String getPersonFieldValue() {
return null;
}

@Override
public String getFunctionName() {
return null;
@ -64,6 +64,11 @@ public class BucketNormalizable extends Normalizable {
return null;
}

@Override
public String getPersonFieldValue() {
return null;
}

@Override
public String getFunctionName() {
return null;
@ -44,6 +44,11 @@ class InfluencerNormalizable extends AbstractLeafNormalizable {
return influencer.getInfluencerFieldName();
}

@Override
public String getPersonFieldValue() {
return influencer.getInfluencerFieldValue();
}

@Override
public String getFunctionName() {
return null;
@ -63,10 +63,11 @@ public class MultiplyingNormalizerProcess implements NormalizerProcess {
result.setPartitionFieldName(record[1]);
result.setPartitionFieldValue(record[2]);
result.setPersonFieldName(record[3]);
result.setFunctionName(record[4]);
result.setValueFieldName(record[5]);
result.setProbability(Double.parseDouble(record[6]));
result.setNormalizedScore(factor * Double.parseDouble(record[7]));
result.setPersonFieldValue(record[4]);
result.setFunctionName(record[5]);
result.setValueFieldName(record[6]);
result.setProbability(Double.parseDouble(record[7]));
result.setNormalizedScore(factor * Double.parseDouble(record[8]));
} catch (NumberFormatException | ArrayIndexOutOfBoundsException e) {
throw new IOException("Unable to write to no-op normalizer", e);
}
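The hunk above shifts every column after person_field_name up by one to make room for the new person_field_value. A plain-Java sketch (not part of this commit) of the resulting record layout; the index constants and the meaning of record[0] are illustrative assumptions, not taken from the source:

public class NormalizerRecordLayoutSketch {
    // Column order implied by the updated MultiplyingNormalizerProcess: record[3] is
    // person_field_name, record[4] is the new person_field_value, and the remaining
    // columns each move one position to the right.
    static final int PARTITION_FIELD_NAME = 1;
    static final int PARTITION_FIELD_VALUE = 2;
    static final int PERSON_FIELD_NAME = 3;
    static final int PERSON_FIELD_VALUE = 4;
    static final int FUNCTION_NAME = 5;
    static final int VALUE_FIELD_NAME = 6;
    static final int PROBABILITY = 7;
    static final int NORMALIZED_SCORE = 8;

    public static void main(String[] args) {
        // Hypothetical record: record[0] is assumed to be the normalization level.
        String[] record = {"root", "part", "something", "person", "fred", "mean", "value", "0.005", "42.0"};
        System.out.println(record[PERSON_FIELD_VALUE] + " scored " + Double.parseDouble(record[NORMALIZED_SCORE]));
    }
}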
@ -44,6 +44,8 @@ public abstract class Normalizable implements ToXContentObject {

abstract String getPersonFieldName();

abstract String getPersonFieldValue();

abstract String getFunctionName();

abstract String getValueFieldName();
@ -70,6 +70,7 @@ public class Normalizer {
NormalizerResult.PARTITION_FIELD_NAME_FIELD.getPreferredName(),
NormalizerResult.PARTITION_FIELD_VALUE_FIELD.getPreferredName(),
NormalizerResult.PERSON_FIELD_NAME_FIELD.getPreferredName(),
NormalizerResult.PERSON_FIELD_VALUE_FIELD.getPreferredName(),
NormalizerResult.FUNCTION_NAME_FIELD.getPreferredName(),
NormalizerResult.VALUE_FIELD_NAME_FIELD.getPreferredName(),
NormalizerResult.PROBABILITY_FIELD.getPreferredName(),

@ -108,6 +109,7 @@ public class Normalizer {
Strings.coalesceToEmpty(normalizable.getPartitionFieldName()),
Strings.coalesceToEmpty(normalizable.getPartitionFieldValue()),
Strings.coalesceToEmpty(normalizable.getPersonFieldName()),
Strings.coalesceToEmpty(normalizable.getPersonFieldValue()),
Strings.coalesceToEmpty(normalizable.getFunctionName()),
Strings.coalesceToEmpty(normalizable.getValueFieldName()),
Double.toString(normalizable.getProbability()),
@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.ml.job.process.normalizer;

import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

@ -26,6 +27,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {
static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name");
static final ParseField PARTITION_FIELD_VALUE_FIELD = new ParseField("partition_field_value");
static final ParseField PERSON_FIELD_NAME_FIELD = new ParseField("person_field_name");
static final ParseField PERSON_FIELD_VALUE_FIELD = new ParseField("person_field_value");
static final ParseField FUNCTION_NAME_FIELD = new ParseField("function_name");
static final ParseField VALUE_FIELD_NAME_FIELD = new ParseField("value_field_name");
static final ParseField PROBABILITY_FIELD = new ParseField("probability");

@ -39,6 +41,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {
PARSER.declareString(NormalizerResult::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD);
PARSER.declareString(NormalizerResult::setPartitionFieldValue, PARTITION_FIELD_VALUE_FIELD);
PARSER.declareString(NormalizerResult::setPersonFieldName, PERSON_FIELD_NAME_FIELD);
PARSER.declareString(NormalizerResult::setPersonFieldValue, PERSON_FIELD_VALUE_FIELD);
PARSER.declareString(NormalizerResult::setFunctionName, FUNCTION_NAME_FIELD);
PARSER.declareString(NormalizerResult::setValueFieldName, VALUE_FIELD_NAME_FIELD);
PARSER.declareDouble(NormalizerResult::setProbability, PROBABILITY_FIELD);

@ -49,6 +52,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {
private String partitionFieldName;
private String partitionFieldValue;
private String personFieldName;
private String personFieldValue;
private String functionName;
private String valueFieldName;
private double probability;

@ -62,6 +66,9 @@ public class NormalizerResult implements ToXContentObject, Writeable {
partitionFieldName = in.readOptionalString();
partitionFieldValue = in.readOptionalString();
personFieldName = in.readOptionalString();
if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
personFieldValue = in.readOptionalString();
}
functionName = in.readOptionalString();
valueFieldName = in.readOptionalString();
probability = in.readDouble();

@ -74,6 +81,9 @@ public class NormalizerResult implements ToXContentObject, Writeable {
out.writeOptionalString(partitionFieldName);
out.writeOptionalString(partitionFieldValue);
out.writeOptionalString(personFieldName);
if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
out.writeOptionalString(personFieldValue);
}
out.writeOptionalString(functionName);
out.writeOptionalString(valueFieldName);
out.writeDouble(probability);

@ -87,6 +97,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {
builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName);
builder.field(PARTITION_FIELD_VALUE_FIELD.getPreferredName(), partitionFieldValue);
builder.field(PERSON_FIELD_NAME_FIELD.getPreferredName(), personFieldName);
builder.field(PERSON_FIELD_VALUE_FIELD.getPreferredName(), personFieldValue);
builder.field(FUNCTION_NAME_FIELD.getPreferredName(), functionName);
builder.field(VALUE_FIELD_NAME_FIELD.getPreferredName(), valueFieldName);
builder.field(PROBABILITY_FIELD.getPreferredName(), probability);

@ -127,6 +138,14 @@ public class NormalizerResult implements ToXContentObject, Writeable {
this.personFieldName = personFieldName;
}

public String getPersonFieldValue() {
return personFieldValue;
}

public void setPersonFieldValue(String personFieldValue) {
this.personFieldValue = personFieldValue;
}

public String getFunctionName() {
return functionName;
}

@ -161,7 +180,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {

@Override
public int hashCode() {
return Objects.hash(level, partitionFieldName, partitionFieldValue, personFieldName,
return Objects.hash(level, partitionFieldName, partitionFieldValue, personFieldName, personFieldValue,
functionName, valueFieldName, probability, normalizedScore);
}

@ -184,6 +203,7 @@ public class NormalizerResult implements ToXContentObject, Writeable {
&& Objects.equals(this.partitionFieldName, that.partitionFieldName)
&& Objects.equals(this.partitionFieldValue, that.partitionFieldValue)
&& Objects.equals(this.personFieldName, that.personFieldName)
&& Objects.equals(this.personFieldValue, that.personFieldValue)
&& Objects.equals(this.functionName, that.functionName)
&& Objects.equals(this.valueFieldName, that.valueFieldName)
&& this.probability == that.probability
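The serialization hunks above gate the new person_field_value on Version.V_6_5_0 so older nodes never see the extra field. A self-contained sketch (not part of this commit) of the same pattern, with plain Java streams standing in for StreamInput/StreamOutput and an illustrative version constant:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class VersionGatedFieldSketch {
    static final int V_6_5_0 = 6050099; // illustrative stand-in for Version.V_6_5_0

    // Mirrors writeOptionalString: a presence flag followed by the value.
    static void writeOptionalString(DataOutputStream out, String value) throws IOException {
        out.writeBoolean(value != null);
        if (value != null) {
            out.writeUTF(value);
        }
    }

    static String readOptionalString(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        int wireVersion = V_6_5_0; // the version negotiated for the connection
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            writeOptionalString(out, "person");      // personFieldName, always written
            if (wireVersion >= V_6_5_0) {            // personFieldValue, only for new enough peers
                writeOptionalString(out, "fred");
            }
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            String personFieldName = readOptionalString(in);
            String personFieldValue = wireVersion >= V_6_5_0 ? readOptionalString(in) : null;
            System.out.println(personFieldName + " / " + personFieldValue);
        }
    }
}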
@ -45,6 +45,11 @@ public class PartitionScoreNormalizable extends AbstractLeafNormalizable {
return null;
}

@Override
public String getPersonFieldValue() {
return null;
}

@Override
public String getFunctionName() {
return null;
@ -46,6 +46,12 @@ class RecordNormalizable extends AbstractLeafNormalizable {
return over != null ? over : record.getByFieldName();
}

@Override
public String getPersonFieldValue() {
String over = record.getOverFieldValue();
return over != null ? over : record.getByFieldValue();
}

@Override
public String getFunctionName() {
return record.getFunction();
@ -43,10 +43,18 @@ public class BucketInfluencerNormalizableTests extends ESTestCase {
assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPartitionFieldName());
}

public void testGetPartitionFieldValue() {
assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPartitionFieldValue());
}

public void testGetPersonFieldName() {
assertEquals("airline", new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPersonFieldName());
}

public void testGetPersonFieldValue() {
assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPersonFieldValue());
}

public void testGetFunctionName() {
assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getFunctionName());
}
@ -73,6 +73,10 @@ public class BucketNormalizableTests extends ESTestCase {
assertNull(new BucketNormalizable(bucket, INDEX_NAME).getPersonFieldName());
}

public void testGetPersonFieldValue() {
assertNull(new BucketNormalizable(bucket, INDEX_NAME).getPersonFieldValue());
}

public void testGetFunctionName() {
assertNull(new BucketNormalizable(bucket, INDEX_NAME).getFunctionName());
}
@ -44,6 +44,10 @@ public class InfluencerNormalizableTests extends ESTestCase {
assertEquals("airline", new InfluencerNormalizable(influencer, INDEX_NAME).getPersonFieldName());
}

public void testGetPersonFieldValue() {
assertEquals("AAL", new InfluencerNormalizable(influencer, INDEX_NAME).getPersonFieldValue());
}

public void testGetFunctionName() {
assertNull(new InfluencerNormalizable(influencer, INDEX_NAME).getFunctionName());
}
@ -19,6 +19,7 @@ public class NormalizerResultTests extends AbstractSerializingTestCase<Normalize
assertNull(msg.getPartitionFieldName());
assertNull(msg.getPartitionFieldValue());
assertNull(msg.getPersonFieldName());
assertNull(msg.getPersonFieldValue());
assertNull(msg.getFunctionName());
assertNull(msg.getValueFieldName());
assertEquals(0.0, msg.getProbability(), EPSILON);

@ -32,6 +33,7 @@ public class NormalizerResultTests extends AbstractSerializingTestCase<Normalize
msg.setPartitionFieldName("part");
msg.setPartitionFieldValue("something");
msg.setPersonFieldName("person");
msg.setPersonFieldValue("fred");
msg.setFunctionName("mean");
msg.setValueFieldName("value");
msg.setProbability(0.005);
@ -179,12 +179,15 @@ public final class KerberosRealm extends Realm implements CachingRealm {

private void handleException(Exception e, final ActionListener<AuthenticationResult> listener) {
if (e instanceof LoginException) {
logger.debug("failed to authenticate user, service login failure", e);
listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, service login failure",
unauthorized(e.getLocalizedMessage(), e)));
} else if (e instanceof GSSException) {
logger.debug("failed to authenticate user, gss context negotiation failure", e);
listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, gss context negotiation failure",
unauthorized(e.getLocalizedMessage(), e)));
} else {
logger.debug("failed to authenticate user", e);
listener.onFailure(e);
}
}
@ -13,11 +13,11 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.script.Script;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.xpack.core.upgrade.IndexUpgradeCheckVersion;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;

import java.util.function.BiConsumer;
import java.util.function.Consumer;
@ -16,8 +16,8 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;

import java.util.HashMap;
import java.util.List;
@ -17,17 +17,19 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction;
import org.elasticsearch.xpack.upgrade.IndexUpgradeService;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;

import java.util.Map;

public class TransportIndexUpgradeInfoAction extends TransportMasterNodeReadAction<IndexUpgradeInfoAction.Request,
IndexUpgradeInfoAction.Response> {
public class TransportIndexUpgradeInfoAction
extends TransportMasterNodeReadAction<IndexUpgradeInfoRequest, IndexUpgradeInfoResponse> {

private final IndexUpgradeService indexUpgradeService;
private final XPackLicenseState licenseState;

@ -40,7 +42,7 @@ public class TransportIndexUpgradeInfoAction extends TransportMasterNodeReadActi
IndexNameExpressionResolver indexNameExpressionResolver,
XPackLicenseState licenseState) {
super(settings, IndexUpgradeInfoAction.NAME, transportService, clusterService, threadPool, actionFilters,
IndexUpgradeInfoAction.Request::new, indexNameExpressionResolver);
IndexUpgradeInfoRequest::new, indexNameExpressionResolver);
this.indexUpgradeService = indexUpgradeService;
this.licenseState = licenseState;
}

@ -51,23 +53,23 @@ public class TransportIndexUpgradeInfoAction extends TransportMasterNodeReadActi
}

@Override
protected IndexUpgradeInfoAction.Response newResponse() {
return new IndexUpgradeInfoAction.Response();
protected IndexUpgradeInfoResponse newResponse() {
return new IndexUpgradeInfoResponse();
}

@Override
protected ClusterBlockException checkBlock(IndexUpgradeInfoAction.Request request, ClusterState state) {
protected ClusterBlockException checkBlock(IndexUpgradeInfoRequest request, ClusterState state) {
// Cluster is not affected but we look up repositories in metadata
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
}

@Override
protected final void masterOperation(final IndexUpgradeInfoAction.Request request, ClusterState state,
final ActionListener<IndexUpgradeInfoAction.Response> listener) {
protected final void masterOperation(final IndexUpgradeInfoRequest request, ClusterState state,
final ActionListener<IndexUpgradeInfoResponse> listener) {
if (licenseState.isUpgradeAllowed()) {
Map<String, UpgradeActionRequired> results =
indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), state);
listener.onResponse(new IndexUpgradeInfoAction.Response(results));
listener.onResponse(new IndexUpgradeInfoResponse(results));
} else {
listener.onFailure(LicenseUtils.newComplianceException(XPackField.UPGRADE));
}
@ -9,12 +9,12 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Request;

import java.io.IOException;

@ -41,7 +41,7 @@ public class RestIndexUpgradeInfoAction extends BaseRestHandler {
}

private RestChannelConsumer handleGet(final RestRequest request, NodeClient client) {
Request infoRequest = new Request(Strings.splitStringByCommaToArray(request.param("index")));
IndexUpgradeInfoRequest infoRequest = new IndexUpgradeInfoRequest(Strings.splitStringByCommaToArray(request.param("index")));
infoRequest.indicesOptions(IndicesOptions.fromRequest(request, infoRequest.indicesOptions()));
return channel -> client.execute(IndexUpgradeInfoAction.INSTANCE, infoRequest, new RestToXContentListener<>(channel));
}
@ -14,12 +14,12 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Response;
import org.junit.Before;

import java.util.Collections;

@ -41,7 +41,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase {
// Testing only negative case here, the positive test is done in bwcTests
assertAcked(client().admin().indices().prepareCreate("test").get());
ensureYellow("test");
Response response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
IndexUpgradeInfoResponse response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
assertThat(response.getActions().entrySet(), empty());
}

@ -57,7 +57,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase {
() -> new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get());
assertThat(e.getMessage(), equalTo("current license is non-compliant for [upgrade]"));
enableLicensing();
Response response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
IndexUpgradeInfoResponse response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
assertThat(response.getActions().entrySet(), empty());
}

@ -132,7 +132,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase {

public void testIndexUpgradeInfoOnEmptyCluster() {
// On empty cluster asking for all indices shouldn't fail since no indices means nothing needs to be upgraded
Response response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("_all").get();
IndexUpgradeInfoResponse response = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("_all").get();
assertThat(response.getActions().entrySet(), empty());

// but calling on a particular index should fail
@ -15,8 +15,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;

import java.io.IOException;
import java.util.Arrays;
@ -30,6 +30,8 @@ import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
@ -39,7 +41,6 @@ import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;
import org.elasticsearch.xpack.core.upgrade.UpgradeField;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction;
@ -171,7 +172,7 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase {
        ensureYellow("test");


        IndexUpgradeInfoAction.Response infoResponse = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
        IndexUpgradeInfoResponse infoResponse = new IndexUpgradeInfoAction.RequestBuilder(client()).setIndices("test").get();
        assertThat(infoResponse.getActions().keySet(), contains("test"));
        assertThat(infoResponse.getActions().get("test"), equalTo(UpgradeActionRequired.UPGRADE));
@ -1,32 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.upgrade.actions;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Request;

public class IndexUpgradeInfoActionRequestTests extends AbstractWireSerializingTestCase<Request> {
    @Override
    protected Request createTestInstance() {
        int indexCount = randomInt(4);
        String[] indices = new String[indexCount];
        for (int i = 0; i < indexCount; i++) {
            indices[i] = randomAlphaOfLength(10);
        }
        Request request = new Request(indices);
        if (randomBoolean()) {
            request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
        }
        return request;
    }

    @Override
    protected Writeable.Reader<Request> instanceReader() {
        return Request::new;
    }
}
@ -1,33 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.upgrade.actions;

import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Response;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

public class IndexUpgradeInfoActionResponseTests extends AbstractStreamableTestCase<Response> {


    @Override
    protected Response createTestInstance() {
        int actionsCount = randomIntBetween(0, 5);
        Map<String, UpgradeActionRequired> actions = new HashMap<>(actionsCount);
        for (int i = 0; i < actionsCount; i++) {
            actions.put(randomAlphaOfLength(10), randomFrom(EnumSet.allOf(UpgradeActionRequired.class)));
        }
        return new Response(actions);
    }

    @Override
    protected Response createBlankInstance() {
        return new Response();
    }
}
@ -0,0 +1,98 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.migration;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;

public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeInfoRequest> implements IndicesRequest.Replaceable {

    private String[] indices = Strings.EMPTY_ARRAY;
    private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);

    public IndexUpgradeInfoRequest(String... indices) {
        indices(indices);
    }

    public IndexUpgradeInfoRequest(StreamInput in) throws IOException {
        super(in);
        indices = in.readStringArray();
        indicesOptions = IndicesOptions.readIndicesOptions(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringArray(indices);
        indicesOptions.writeIndicesOptions(out);
    }

    @Override
    public String[] indices() {
        return indices;
    }

    @Override
    public IndexUpgradeInfoRequest indices(String... indices) {
        this.indices = Objects.requireNonNull(indices, "indices cannot be null");
        return this;
    }

    @Override
    public IndicesOptions indicesOptions() {
        return indicesOptions;
    }

    public void indicesOptions(IndicesOptions indicesOptions) {
        this.indicesOptions = indicesOptions;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        IndexUpgradeInfoRequest request = (IndexUpgradeInfoRequest) o;
        return Arrays.equals(indices, request.indices) &&
            Objects.equals(indicesOptions.toString(), request.indicesOptions.toString());
    }

    @Override
    public int hashCode() {
        return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString());
    }
}
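For context, a minimal usage sketch of the relocated request class above (the index names and options are illustrative, not part of this commit):

    // Ask for upgrade info on two hypothetical indices, ignoring unavailable ones
    // via the four-boolean IndicesOptions factory used elsewhere in this diff.
    IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("logs-2018", "metrics-2018");
    request.indicesOptions(IndicesOptions.fromOptions(true, true, true, true));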
@ -0,0 +1,133 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.protocol.xpack.migration;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject {

    private static final ParseField INDICES = new ParseField("indices");
    private static final ParseField ACTION_REQUIRED = new ParseField("action_required");

    private static final ConstructingObjectParser<IndexUpgradeInfoResponse, String> PARSER =
        new ConstructingObjectParser<>("IndexUpgradeInfoResponse",
            true,
            (a, c) -> {
                @SuppressWarnings("unchecked")
                Map<String, Object> map = (Map<String, Object>)a[0];
                Map<String, UpgradeActionRequired> actionsRequired = map.entrySet().stream()
                    .filter(e -> {
                        if (e.getValue() instanceof Map == false) {
                            return false;
                        }
                        @SuppressWarnings("unchecked")
                        Map<String, Object> value = (Map<String, Object>)e.getValue();
                        return value.containsKey(ACTION_REQUIRED.getPreferredName());
                    })
                    .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        e -> {
                            @SuppressWarnings("unchecked")
                            Map<String, Object> value = (Map<String, Object>) e.getValue();
                            return UpgradeActionRequired.fromString((String)value.get(ACTION_REQUIRED.getPreferredName()));
                        }
                    ));
                return new IndexUpgradeInfoResponse(actionsRequired);
            });

    static {
        PARSER.declareObject(constructorArg(), (p, c) -> p.map(), INDICES);
    }


    private Map<String, UpgradeActionRequired> actions;

    public IndexUpgradeInfoResponse() {

    }

    public IndexUpgradeInfoResponse(Map<String, UpgradeActionRequired> actions) {
        this.actions = actions;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
    }

    public Map<String, UpgradeActionRequired> getActions() {
        return actions;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.startObject(INDICES.getPreferredName());
            for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
                builder.startObject(entry.getKey());
                {
                    builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString());
                }
                builder.endObject();
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        IndexUpgradeInfoResponse response = (IndexUpgradeInfoResponse) o;
        return Objects.equals(actions, response.actions);
    }

    @Override
    public int hashCode() {
        return Objects.hash(actions);
    }

    public static IndexUpgradeInfoResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }
}
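For context, a short sketch of how client code might use the response class above (the index name is illustrative, not part of this commit):

    // A response mapping one hypothetical index to the UPGRADE action.
    IndexUpgradeInfoResponse response =
        new IndexUpgradeInfoResponse(Collections.singletonMap("some-index", UpgradeActionRequired.UPGRADE));
    // toXContent(...) renders roughly {"indices":{"some-index":{"action_required":"upgrade"}}},
    // which is the shape fromXContent(...) parses back via the ConstructingObjectParser.
    Map<String, UpgradeActionRequired> actions = response.getActions();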
@ -1,9 +1,22 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.xpack.core.upgrade;
package org.elasticsearch.protocol.xpack.migration;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -0,0 +1,24 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Request and Response objects for the default distribution's Migration
 * APIs.
 */
package org.elasticsearch.protocol.xpack.migration;
@ -0,0 +1,50 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.protocol.xpack.migration;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

public class IndexUpgradeInfoRequestTests extends AbstractWireSerializingTestCase<IndexUpgradeInfoRequest> {
    @Override
    protected IndexUpgradeInfoRequest createTestInstance() {
        int indexCount = randomInt(4);
        String[] indices = new String[indexCount];
        for (int i = 0; i < indexCount; i++) {
            indices[i] = randomAlphaOfLength(10);
        }
        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(indices);
        if (randomBoolean()) {
            request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
        }
        return request;
    }

    @Override
    protected Writeable.Reader<IndexUpgradeInfoRequest> instanceReader() {
        return IndexUpgradeInfoRequest::new;
    }

    public void testNullIndices() {
        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[])null));
        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[])null));
    }
}
@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.protocol.xpack.migration;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class IndexUpgradeInfoResponseTests extends AbstractStreamableXContentTestCase<IndexUpgradeInfoResponse> {
    @Override
    protected IndexUpgradeInfoResponse doParseInstance(XContentParser parser) {
        return IndexUpgradeInfoResponse.fromXContent(parser);
    }

    @Override
    protected IndexUpgradeInfoResponse createBlankInstance() {
        return new IndexUpgradeInfoResponse();
    }

    @Override
    protected IndexUpgradeInfoResponse createTestInstance() {
        return randomIndexUpgradeInfoResponse(randomIntBetween(0, 10));
    }

    private static IndexUpgradeInfoResponse randomIndexUpgradeInfoResponse(int numIndices) {
        Map<String, UpgradeActionRequired> actions = new HashMap<>();
        for (int i = 0; i < numIndices; i++) {
            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
        }
        return new IndexUpgradeInfoResponse(actions);
    }

    @Override
    protected IndexUpgradeInfoResponse mutateInstance(IndexUpgradeInfoResponse instance) {
        if (instance.getActions().size() == 0) {
            return randomIndexUpgradeInfoResponse(1);
        }
        Map<String, UpgradeActionRequired> actions = new HashMap<>(instance.getActions());
        if (randomBoolean()) {
            Iterator<Map.Entry<String, UpgradeActionRequired>> iterator = actions.entrySet().iterator();
            iterator.next();
            iterator.remove();
        } else {
            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
        }
        return new IndexUpgradeInfoResponse(actions);
    }
}
@ -0,0 +1,62 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.integration;

import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.junit.After;

import java.util.Collections;

import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.is;

public class ReopenJobResetsFinishedTimeIT extends MlNativeAutodetectIntegTestCase {

    @After
    public void cleanUpTest() {
        cleanUp();
    }

    public void test() {
        final String jobId = "reset-finished-time-test";
        Job.Builder job = createJob(jobId);

        registerJob(job);
        putJob(job);
        openJob(job.getId());

        assertThat(getSingleJob(jobId).getFinishedTime(), is(nullValue()));

        closeJob(jobId);
        assertThat(getSingleJob(jobId).getFinishedTime(), is(notNullValue()));

        openJob(jobId);
        assertThat(getSingleJob(jobId).getFinishedTime(), is(nullValue()));
    }

    private Job getSingleJob(String jobId) {
        return getJob(jobId).get(0);
    }

    private Job.Builder createJob(String id) {
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(DataDescription.DataFormat.XCONTENT);
        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);

        Detector.Builder d = new Detector.Builder("count", null);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));

        Job.Builder builder = new Job.Builder();
        builder.setId(id);
        builder.setAnalysisConfig(analysisConfig);
        builder.setDataDescription(dataDescription);
        return builder;
    }
}