Merge remote-tracking branch 'es/7.x' into enrich-7.x
commit 7ffe2e7e63
@@ -843,7 +843,7 @@ class BuildPlugin implements Plugin<Project> {
}
test.jvmArgumentProviders.add(nonInputProperties)
test.extensions.getByType(ExtraPropertiesExtension).set('nonInputProperties', nonInputProperties)
test.extensions.add('nonInputProperties', nonInputProperties)

test.executable = "${ext.get('runtimeJavaHome')}/bin/java"
test.workingDir = project.file("${project.buildDir}/testrun/${test.name}")

@@ -865,7 +865,8 @@ class BuildPlugin implements Plugin<Project> {
}

// we use './temp' since this is per JVM and tests are forbidden from writing to CWD
test.systemProperties 'java.io.tmpdir': './temp',
test.systemProperties 'gradle.dist.lib': new File(project.class.location.toURI()).parent,
'java.io.tmpdir': './temp',
'java.awt.headless': 'true',
'tests.gradle': 'true',
'tests.artifact': project.name,

@@ -881,7 +882,6 @@ class BuildPlugin implements Plugin<Project> {
}

// don't track these as inputs since they contain absolute paths and break cache relocatability
nonInputProperties.systemProperty('gradle.dist.lib', new File(project.class.location.toURI()).parent)
nonInputProperties.systemProperty('gradle.worker.jar', "${project.gradle.getGradleUserHomeDir()}/caches/${project.gradle.gradleVersion}/workerMain/gradle-worker.jar")
nonInputProperties.systemProperty('gradle.user.home', project.gradle.getGradleUserHomeDir())

@@ -901,6 +901,12 @@ class BuildPlugin implements Plugin<Project> {
// TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
test.systemProperty 'es.transport.cname_in_publish_address', 'true'

// Set netty system properties to the properties we configure in jvm.options
test.systemProperty('io.netty.noUnsafe', 'true')
test.systemProperty('io.netty.noKeySetOptimization', 'true')
test.systemProperty('io.netty.recycler.maxCapacityPerThread', '0')
test.systemProperty('io.netty.allocator.numDirectArenas', '0')

test.testLogging { TestLoggingContainer logging ->
logging.showExceptions = true
logging.showCauses = true

@@ -1007,19 +1013,4 @@ class BuildPlugin implements Plugin<Project> {
})
}
}

private static class SystemPropertyCommandLineArgumentProvider implements CommandLineArgumentProvider {
private final Map<String, Object> systemProperties = [:]

void systemProperty(String key, Object value) {
systemProperties.put(key, value)
}

@Override
Iterable<String> asArguments() {
return systemProperties.collect { key, value ->
"-D${key}=${value.toString()}".toString()
}
}
}
}

@@ -206,8 +206,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
response(snippet)
return
}
if (snippet.test || snippet.console ||
snippet.language == 'console') {
if ((snippet.language == 'js') && (snippet.console)) {
throw new InvalidUserDataException(
"$snippet: Use `[source,console]` instead of `// CONSOLE`.")
}
if (snippet.test || snippet.language == 'console') {
test(snippet)
previousTest = snippet
return

@@ -0,0 +1,30 @@
package org.elasticsearch.gradle;

import org.gradle.api.tasks.Input;
import org.gradle.process.CommandLineArgumentProvider;

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class SystemPropertyCommandLineArgumentProvider implements CommandLineArgumentProvider {
private final Map<String, Object> systemProperties = new LinkedHashMap<>();

public void systemProperty(String key, Object value) {
systemProperties.put(key, value);
}

@Override
public Iterable<String> asArguments() {
return systemProperties.entrySet()
.stream()
.map(entry -> "-D" + entry.getKey() + "=" + entry.getValue())
.collect(Collectors.toList());
}

// Track system property keys as an input so our build cache key will change if we add properties but values are still ignored
@Input
public Iterable<String> getPropertyNames() {
return systemProperties.keySet();
}
}

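The new provider keeps absolute paths out of Gradle's up-to-date and cache-key checks while still passing them to the forked test JVM, which is why BuildPlugin registers it via test.jvmArgumentProviders above. A minimal standalone sketch of the behaviour, assuming only the class added in this hunk (the main wrapper and the sample value are illustrative):

import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider;

public class NonInputPropertiesDemo {
    public static void main(String[] args) {
        SystemPropertyCommandLineArgumentProvider nonInputProperties = new SystemPropertyCommandLineArgumentProvider();
        // the value (an absolute path) reaches the JVM but is not part of the build cache key
        nonInputProperties.systemProperty("gradle.user.home", "/home/ci/.gradle");
        // rendered as -D flags for the forked test JVM
        nonInputProperties.asArguments().forEach(System.out::println);      // -Dgradle.user.home=/home/ci/.gradle
        // only the keys are tracked as an @Input
        nonInputProperties.getPropertyNames().forEach(System.out::println); // gradle.user.home
    }
}
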
@@ -70,19 +70,13 @@ public class TestingConventionsTasks extends DefaultTask {
}

@Input
public Map<String, Set<File>> classFilesPerEnabledTask(FileTree testClassFiles) {
Map<String, Set<File>> collector = new HashMap<>();

// Gradle Test
collector.putAll(
getProject().getTasks().withType(Test.class).stream()
.filter(Task::getEnabled)
.collect(Collectors.toMap(
Task::getPath,
task -> task.getCandidateClassFiles().getFiles()
))
);
return Collections.unmodifiableMap(collector);
public Map<String, Set<File>> getClassFilesPerEnabledTask() {
return getProject().getTasks().withType(Test.class).stream()
.filter(Task::getEnabled)
.collect(Collectors.toMap(
Task::getPath,
task -> task.getCandidateClassFiles().getFiles()
));
}

@Input

@@ -154,7 +148,7 @@ public class TestingConventionsTasks extends DefaultTask {
.collect(Collectors.toList())
).getAsFileTree();

final Map<String, Set<File>> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles);
final Map<String, Set<File>> classFilesPerTask = getClassFilesPerEnabledTask();

final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream()
.collect(

@@ -18,20 +18,65 @@
*/
package org.elasticsearch.gradle.testfixtures;

import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class TestFixtureExtension {

private final Project project;
final NamedDomainObjectContainer<Project> fixtures;
final Map<String, String> serviceToProjectUseMap = new HashMap<>();

public TestFixtureExtension(Project project) {
this.project = project;
this.fixtures = project.container(Project.class);
}

public void useFixture() {
useFixture(this.project.getPath());
}

public void useFixture(String path) {
addFixtureProject(path);
serviceToProjectUseMap.put(path, this.project.getPath());
}

public void useFixture(String path, String serviceName) {
addFixtureProject(path);
String key = getServiceNameKey(path, serviceName);
serviceToProjectUseMap.put(key, this.project.getPath());

Optional<String> otherProject = this.findOtherProjectUsingService(key);
if (otherProject.isPresent()) {
throw new GradleException(
"Projects " + otherProject.get() + " and " + this.project.getPath() + " both claim the "+ serviceName +
" service defined in the docker-compose.yml of " + path + "This is not supported because it breaks " +
"running in parallel. Configure dedicated services for each project and use those instead."
);
}
}

private String getServiceNameKey(String fixtureProjectPath, String serviceName) {
return fixtureProjectPath + "::" + serviceName;
}

private Optional<String> findOtherProjectUsingService(String serviceName) {
return this.project.getRootProject().getAllprojects().stream()
.filter(p -> p.equals(this.project) == false)
.filter(p -> p.getExtensions().findByType(TestFixtureExtension.class) != null)
.map(project -> project.getExtensions().getByType(TestFixtureExtension.class))
.flatMap(ext -> ext.serviceToProjectUseMap.entrySet().stream())
.filter(entry -> entry.getKey().equals(serviceName))
.map(Map.Entry::getValue)
.findAny();
}

private void addFixtureProject(String path) {
Project fixtureProject = this.project.findProject(path);
if (fixtureProject == null) {
throw new IllegalArgumentException("Could not find test fixture " + fixtureProject);

@@ -42,6 +87,20 @@ public class TestFixtureExtension {
);
}
fixtures.add(fixtureProject);
// Check for exclusive access
Optional<String> otherProject = this.findOtherProjectUsingService(path);
if (otherProject.isPresent()) {
throw new GradleException("Projects " + otherProject.get() + " and " + this.project.getPath() + " both " +
"claim all services from " + path + ". This is not supported because it breaks running in parallel. " +
"Configure specific services in docker-compose.yml for each and add the service name to `useFixture`"
);
}
}

boolean isServiceRequired(String serviceName, String fixtureProject) {
if (serviceToProjectUseMap.containsKey(fixtureProject)) {
return true;
}
return serviceToProjectUseMap.containsKey(getServiceNameKey(fixtureProject, serviceName));
}
}

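The bookkeeping above stores claims in serviceToProjectUseMap keyed either by the fixture project path (the whole fixture is claimed) or by path + "::" + serviceName (a single docker-compose service is claimed), and isServiceRequired checks both forms. A standalone sketch of that lookup with made-up project paths, purely to illustrate the key scheme:

import java.util.HashMap;
import java.util.Map;

public class FixtureClaimDemo {
    public static void main(String[] args) {
        Map<String, String> serviceToProjectUseMap = new HashMap<>();
        // :fixture-a is claimed wholesale by :consumer-one
        serviceToProjectUseMap.put(":fixture-a", ":consumer-one");
        // only the "kdc" service of :fixture-b is claimed by :consumer-two
        serviceToProjectUseMap.put(":fixture-b" + "::" + "kdc", ":consumer-two");

        System.out.println(isServiceRequired(serviceToProjectUseMap, "anything", ":fixture-a")); // true
        System.out.println(isServiceRequired(serviceToProjectUseMap, "kdc", ":fixture-b"));      // true
        System.out.println(isServiceRequired(serviceToProjectUseMap, "ldap", ":fixture-b"));     // false
    }

    // mirrors TestFixtureExtension.isServiceRequired(serviceName, fixtureProject)
    static boolean isServiceRequired(Map<String, String> claims, String serviceName, String fixtureProject) {
        if (claims.containsKey(fixtureProject)) {
            return true;
        }
        return claims.containsKey(fixtureProject + "::" + serviceName);
    }
}
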
@@ -20,9 +20,12 @@ package org.elasticsearch.gradle.testfixtures;

import com.avast.gradle.dockercompose.ComposeExtension;
import com.avast.gradle.dockercompose.DockerComposePlugin;
import com.avast.gradle.dockercompose.ServiceInfo;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider;
import org.elasticsearch.gradle.precommit.TestingConventionsTasks;
import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
import org.gradle.api.Plugin;
import org.gradle.api.Project;

@@ -56,9 +59,6 @@ public class TestFixturesPlugin implements Plugin<Project> {
ext.set("testFixturesDir", testfixturesDir);

if (project.file(DOCKER_COMPOSE_YML).exists()) {
// the project that defined a test fixture can also use it
extension.fixtures.add(project);

Task buildFixture = project.getTasks().create("buildFixture");
Task pullFixture = project.getTasks().create("pullFixture");
Task preProcessFixture = project.getTasks().create("preProcessFixture");

@@ -104,6 +104,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
configureServiceInfoForTask(
postProcessFixture,
project,
false,
(name, port) -> postProcessFixture.getExtensions()
.getByType(ExtraPropertiesExtension.class).set(name, port)
);

@@ -142,7 +143,9 @@ public class TestFixturesPlugin implements Plugin<Project> {
configureServiceInfoForTask(
task,
fixtureProject,
task::systemProperty
true,
(name, host) ->
task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host)
);
task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture"));
})

@@ -162,31 +165,44 @@ public class TestFixturesPlugin implements Plugin<Project> {
);
}

private void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer<String, Integer> consumer) {
private void configureServiceInfoForTask(
Task task, Project fixtureProject, boolean enableFilter, BiConsumer<String, Integer> consumer
) {
// Configure ports for the tests as system properties.
// We only know these at execution time so we need to do it in doFirst
task.doFirst(theTask ->
fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos()
.forEach((service, infos) -> {
infos.getTcpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".tcp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
infos.getUdpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".udp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
})
TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class);
task.doFirst(new Action<Task>() {
@Override
public void execute(Task theTask) {
fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos()
.entrySet().stream()
.filter(entry -> enableFilter == false ||
extension.isServiceRequired(entry.getKey(), fixtureProject.getPath())
)
.forEach(entry -> {
String service = entry.getKey();
ServiceInfo infos = entry.getValue();
infos.getTcpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".tcp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
infos.getUdpPorts()
.forEach((container, host) -> {
String name = "test.fixtures." + service + ".udp." + container;
theTask.getLogger().info("port mapping property: {}={}", name, host);
consumer.accept(
name,
host
);
});
});
}
}
);
}

@@ -204,7 +204,7 @@ final class IndexLifecycleRequestConverters {
}

static Request executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest executeSnapshotLifecyclePolicyRequest) {
Request request = new Request(HttpPut.METHOD_NAME,
Request request = new Request(HttpPost.METHOD_NAME,
new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/policy")
.addPathPartAsIs(executeSnapshotLifecyclePolicyRequest.getPolicyId())

@@ -553,6 +553,10 @@ final class RequestConverters {
return prepareReindexRequest(reindexRequest, false);
}

static Request submitDeleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
return prepareDeleteByQueryRequest(deleteByQueryRequest, false);
}

private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);

@@ -572,6 +576,36 @@ final class RequestConverters {
return request;
}

private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest,
boolean waitForCompletion) throws IOException {
String endpoint =
endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout())
.withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
.withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond())
.withIndicesOptions(deleteByQueryRequest.indicesOptions())
.withWaitForCompletion(waitForCompletion);
if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
params.putParam("conflicts", "proceed");
}
if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
}
if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
params.putParam("scroll", deleteByQueryRequest.getScrollTime());
}
if (deleteByQueryRequest.getMaxDocs() > 0) {
params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
}
request.addParameters(params.asMap());
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}

static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException {
String endpoint =
endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");

@@ -602,31 +636,7 @@ final class RequestConverters {
}

static Request deleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException {
String endpoint =
endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout())
.withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
.withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond())
.withIndicesOptions(deleteByQueryRequest.indicesOptions());
if (deleteByQueryRequest.isAbortOnVersionConflict() == false) {
params.putParam("conflicts", "proceed");
}
if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) {
params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize()));
}
if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
params.putParam("scroll", deleteByQueryRequest.getScrollTime());
}
if (deleteByQueryRequest.getMaxDocs() > 0) {
params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
}
request.addParameters(params.asMap());
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
return prepareDeleteByQueryRequest(deleteByQueryRequest, true);
}

static Request rethrottleReindex(RethrottleRequest rethrottleRequest) {

@@ -256,7 +256,7 @@ public class RestHighLevelClient implements Closeable {
private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this);
private final RollupClient rollupClient = new RollupClient(this);
private final CcrClient ccrClient = new CcrClient(this);
private final DataFrameClient dataFrameClient = new DataFrameClient(this);
private final TransformClient transformClient = new TransformClient(this);
private final EnrichClient enrichClient = new EnrichClient(this);

/**

@@ -478,8 +478,8 @@ public class RestHighLevelClient implements Closeable {
*
* @return the client wrapper for making Data Frame API calls
*/
public DataFrameClient dataFrame() {
return dataFrameClient;
public TransformClient transform() {
return transformClient;
}

public EnrichClient enrich() {

@@ -595,6 +595,21 @@ public class RestHighLevelClient implements Closeable {
);
}

/**
* Submits a delete by query task
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">
* Delete By Query API on elastic.co</a>
* @param deleteByQueryRequest the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the submission response
*/
public final TaskSubmissionResponse submitDeleteByQueryTask(DeleteByQueryRequest deleteByQueryRequest,
RequestOptions options) throws IOException {
return performRequestAndParseEntity(
deleteByQueryRequest, RequestConverters::submitDeleteByQuery, options, TaskSubmissionResponse::fromXContent, emptySet()
);
}

/**
* Asynchronously executes a delete by query request.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html">

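A hedged usage sketch for the new submission API: submitDeleteByQueryTask goes through RequestConverters::submitDeleteByQuery above (wait for completion disabled) and returns a task handle instead of blocking. The host, index and query below are placeholders, and the getTask() accessor on TaskSubmissionResponse is an assumption; only the method added in this hunk plus the public DeleteByQueryRequest API are relied on:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.tasks.TaskSubmissionResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;

public class SubmitDeleteByQueryDemo {
    public static void main(String[] args) throws Exception {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            DeleteByQueryRequest request = new DeleteByQueryRequest("my-index"); // placeholder index
            request.setQuery(QueryBuilders.termQuery("user", "kimchy"));         // placeholder query
            // the delete by query runs as a task on the cluster; the call returns its task id
            TaskSubmissionResponse response = client.submitDeleteByQueryTask(request, RequestOptions.DEFAULT);
            System.out.println("task: " + response.getTask());
        }
    }
}
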
@@ -21,29 +21,29 @@ package org.elasticsearch.client;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PreviewTransformResponse;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.StopTransformResponse;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformResponse;

import java.io.IOException;
import java.util.Collections;

public final class DataFrameClient {
public final class TransformClient {

private final RestHighLevelClient restHighLevelClient;

DataFrameClient(RestHighLevelClient restHighLevelClient) {
TransformClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}

@@ -54,15 +54,15 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
*
* @param request The PutDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}.
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse putDataFrameTransform(PutDataFrameTransformRequest request, RequestOptions options) throws IOException {
public AcknowledgedResponse putTransform(PutTransformRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::putDataFrameTransform,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());

@@ -74,16 +74,16 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html">
* Create transform documentation</a>
* @param request The PutDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}.
* @param request The PutTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfig}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable putDataFrameTransformAsync(PutDataFrameTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
public Cancellable putTransformAsync(PutTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::putDataFrameTransform,
TransformRequestConverters::putTransform,
options,
AcknowledgedResponse::fromXContent,
listener,

@@ -97,18 +97,18 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Create transform documentation</a>
*
* @param request The UpdateDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}.
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An UpdateDataFrameTransformResponse object containing the updated configuration
* @return An UpdateTransformResponse object containing the updated configuration
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public UpdateDataFrameTransformResponse updateDataFrameTransform(UpdateDataFrameTransformRequest request,
RequestOptions options) throws IOException {
public UpdateTransformResponse updateTransform(UpdateTransformRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::updateDataFrameTransform,
TransformRequestConverters::updateTransform,
options,
UpdateDataFrameTransformResponse::fromXContent,
UpdateTransformResponse::fromXContent,
Collections.emptySet());
}

@@ -118,19 +118,19 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html">
* Create transform documentation</a>
* @param request The UpdateDataFrameTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}.
* @param request The UpdateTransformRequest containing the
* {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}.
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable updateDataFrameTransformAsync(UpdateDataFrameTransformRequest request,
RequestOptions options,
ActionListener<UpdateDataFrameTransformResponse> listener) {
public Cancellable updateTransformAsync(UpdateTransformRequest request,
RequestOptions options,
ActionListener<UpdateTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::updateDataFrameTransform,
TransformRequestConverters::updateTransform,
options,
UpdateDataFrameTransformResponse::fromXContent,
UpdateTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

@@ -142,17 +142,17 @@ public final class DataFrameClient {
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
*
* @param request Specifies the which transforms to get the stats for
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The transform stats
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameTransformStatsResponse getDataFrameTransformStats(GetDataFrameTransformStatsRequest request, RequestOptions options)
public GetTransformStatsResponse getTransformStats(GetTransformStatsRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransformStats,
TransformRequestConverters::getTransformStats,
options,
GetDataFrameTransformStatsResponse::fromXContent,
GetTransformStatsResponse::fromXContent,
Collections.emptySet());
}

@@ -162,17 +162,17 @@ public final class DataFrameClient {
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html">
* Get transform stats documentation</a>
* @param request Specifies the which transforms to get the stats for
* @param request Specifies which transforms to get the stats for
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getDataFrameTransformStatsAsync(GetDataFrameTransformStatsRequest request, RequestOptions options,
ActionListener<GetDataFrameTransformStatsResponse> listener) {
public Cancellable getTransformStatsAsync(GetTransformStatsRequest request, RequestOptions options,
ActionListener<GetTransformStatsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransformStats,
TransformRequestConverters::getTransformStats,
options,
GetDataFrameTransformStatsResponse::fromXContent,
GetTransformStatsResponse::fromXContent,
listener,
Collections.emptySet());
}

@@ -189,10 +189,10 @@ public final class DataFrameClient {
* @return An AcknowledgedResponse object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteDataFrameTransform(DeleteDataFrameTransformRequest request, RequestOptions options)
public AcknowledgedResponse deleteTransform(DeleteTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::deleteDataFrameTransform,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());

@@ -209,10 +209,10 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteDataFrameTransformAsync(DeleteDataFrameTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
public Cancellable deleteTransformAsync(DeleteTransformRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::deleteDataFrameTransform,
TransformRequestConverters::deleteTransform,
options,
AcknowledgedResponse::fromXContent,
listener,

@@ -231,12 +231,12 @@ public final class DataFrameClient {
* @return A response containing the results of the applied transform
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PreviewDataFrameTransformResponse previewDataFrameTransform(PreviewDataFrameTransformRequest request, RequestOptions options)
public PreviewTransformResponse previewTransform(PreviewTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::previewDataFrameTransform,
TransformRequestConverters::previewTransform,
options,
PreviewDataFrameTransformResponse::fromXContent,
PreviewTransformResponse::fromXContent,
Collections.emptySet());
}

@@ -250,12 +250,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable previewDataFrameTransformAsync(PreviewDataFrameTransformRequest request, RequestOptions options,
ActionListener<PreviewDataFrameTransformResponse> listener) {
public Cancellable previewTransformAsync(PreviewTransformRequest request, RequestOptions options,
ActionListener<PreviewTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::previewDataFrameTransform,
TransformRequestConverters::previewTransform,
options,
PreviewDataFrameTransformResponse::fromXContent,
PreviewTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

@@ -272,12 +272,12 @@ public final class DataFrameClient {
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StartDataFrameTransformResponse startDataFrameTransform(StartDataFrameTransformRequest request, RequestOptions options)
public StartTransformResponse startTransform(StartTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::startDataFrameTransform,
TransformRequestConverters::startTransform,
options,
StartDataFrameTransformResponse::fromXContent,
StartTransformResponse::fromXContent,
Collections.emptySet());
}

@@ -292,12 +292,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable startDataFrameTransformAsync(StartDataFrameTransformRequest request, RequestOptions options,
ActionListener<StartDataFrameTransformResponse> listener) {
public Cancellable startTransformAsync(StartTransformRequest request, RequestOptions options,
ActionListener<StartTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::startDataFrameTransform,
TransformRequestConverters::startTransform,
options,
StartDataFrameTransformResponse::fromXContent,
StartTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

@@ -314,12 +314,12 @@ public final class DataFrameClient {
* @return A response object indicating request success
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public StopDataFrameTransformResponse stopDataFrameTransform(StopDataFrameTransformRequest request, RequestOptions options)
public StopTransformResponse stopTransform(StopTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::stopDataFrameTransform,
TransformRequestConverters::stopTransform,
options,
StopDataFrameTransformResponse::fromXContent,
StopTransformResponse::fromXContent,
Collections.emptySet());
}

@@ -334,12 +334,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable stopDataFrameTransformAsync(StopDataFrameTransformRequest request, RequestOptions options,
ActionListener<StopDataFrameTransformResponse> listener) {
public Cancellable stopTransformAsync(StopTransformRequest request, RequestOptions options,
ActionListener<StopTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::stopDataFrameTransform,
TransformRequestConverters::stopTransform,
options,
StopDataFrameTransformResponse::fromXContent,
StopTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

@@ -353,15 +353,15 @@ public final class DataFrameClient {
*
* @param request The get transform request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return An GetDataFrameTransformResponse containing the requested transforms
* @return An GetTransformResponse containing the requested transforms
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public GetDataFrameTransformResponse getDataFrameTransform(GetDataFrameTransformRequest request, RequestOptions options)
public GetTransformResponse getTransform(GetTransformRequest request, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransform,
TransformRequestConverters::getTransform,
options,
GetDataFrameTransformResponse::fromXContent,
GetTransformResponse::fromXContent,
Collections.emptySet());
}

@@ -376,12 +376,12 @@ public final class DataFrameClient {
* @param listener Listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getDataFrameTransformAsync(GetDataFrameTransformRequest request, RequestOptions options,
ActionListener<GetDataFrameTransformResponse> listener) {
public Cancellable getTransformAsync(GetTransformRequest request, RequestOptions options,
ActionListener<GetTransformResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
DataFrameRequestConverters::getDataFrameTransform,
TransformRequestConverters::getTransform,
options,
GetDataFrameTransformResponse::fromXContent,
GetTransformResponse::fromXContent,
listener,
Collections.emptySet());
}

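With the rename, callers move from client.dataFrame() to client.transform() and to the Transform* request and response classes. A hedged sketch of the call pattern (the transform id is a placeholder, the client is assumed to be configured elsewhere, and response accessors such as getTransformConfigurations() and isAcknowledged() are assumed to carry over unchanged from the pre-rename API):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformResponse;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;

import java.io.IOException;

public class TransformClientDemo {
    static void startExistingTransform(RestHighLevelClient client) throws IOException {
        // look the transform up by id, then start it
        GetTransformResponse found =
            client.transform().getTransform(new GetTransformRequest("my-transform"), RequestOptions.DEFAULT);
        System.out.println("configs: " + found.getTransformConfigurations().size());

        StartTransformResponse started =
            client.transform().startTransform(new StartTransformRequest("my-transform"), RequestOptions.DEFAULT);
        System.out.println("acknowledged: " + started.isAcknowledged());
    }
}
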
@@ -24,29 +24,29 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
import org.elasticsearch.client.transform.DeleteTransformRequest;
import org.elasticsearch.client.transform.GetTransformRequest;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.PreviewTransformRequest;
import org.elasticsearch.client.transform.PutTransformRequest;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StopTransformRequest;
import org.elasticsearch.client.transform.UpdateTransformRequest;
import org.elasticsearch.common.Strings;

import java.io.IOException;

import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
import static org.elasticsearch.client.transform.DeleteDataFrameTransformRequest.FORCE;
import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.transform.PutDataFrameTransformRequest.DEFER_VALIDATION;
import static org.elasticsearch.client.transform.DeleteTransformRequest.FORCE;
import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
import static org.elasticsearch.client.transform.PutTransformRequest.DEFER_VALIDATION;

final class DataFrameRequestConverters {
final class TransformRequestConverters {

private DataFrameRequestConverters() {}
private TransformRequestConverters() {}

static Request putDataFrameTransform(PutDataFrameTransformRequest putRequest) throws IOException {
static Request putTransform(PutTransformRequest putRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(putRequest.getConfig().getId())

@@ -59,7 +59,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request updateDataFrameTransform(UpdateDataFrameTransformRequest updateDataFrameTransformRequest) throws IOException {
static Request updateTransform(UpdateTransformRequest updateDataFrameTransformRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(updateDataFrameTransformRequest.getId())

@@ -73,7 +73,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) {
static Request getTransform(GetTransformRequest getRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId()))

@@ -91,7 +91,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest deleteRequest) {
static Request deleteTransform(DeleteTransformRequest deleteRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(deleteRequest.getId())

@@ -103,7 +103,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request startDataFrameTransform(StartDataFrameTransformRequest startRequest) {
static Request startTransform(StartTransformRequest startRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(startRequest.getId())

@@ -118,7 +118,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) {
static Request stopTransform(StopTransformRequest stopRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(stopRequest.getId())

@@ -139,7 +139,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException {
static Request previewTransform(PreviewTransformRequest previewRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms", "_preview")
.build();

@@ -148,7 +148,7 @@ final class DataFrameRequestConverters {
return request;
}

static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest statsRequest) {
static Request getTransformStats(GetTransformStatsRequest statsRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_frame", "transforms")
.addPathPart(statsRequest.getId())

@@ -113,8 +113,6 @@ public class DetailAnalyzeResponse {
private final String name;
private final AnalyzeResponse.AnalyzeToken[] tokens;

private static final String TOKENS = "tokens";

@Override
public boolean equals(Object o) {
if (this == o) return true;

@@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;

import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor;
import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.spi.NamedXContentProvider;

import java.util.ArrayList;
import java.util.List;

public class MlInferenceNamedXContentProvider implements NamedXContentProvider {

@Override
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();

// PreProcessing
namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME),
OneHotEncoding::fromXContent));
namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME),
TargetMeanEncoding::fromXContent));
namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME),
FrequencyEncoding::fromXContent));

// Model
namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent));

return namedXContent;
}

}

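A hedged sketch of wiring these parsers into a NamedXContentRegistry so preprocessor and trained model objects can be parsed polymorphically by name; constructing the registry by hand here is purely for illustration, the high-level client is expected to do this internally:

import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;

public class InferenceRegistryDemo {
    public static void main(String[] args) {
        // one_hot_encoding, target_mean_encoding, frequency_encoding and tree become parseable entries
        NamedXContentRegistry registry =
            new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers());
        System.out.println("registered ML inference named XContent parsers");
    }
}
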
@@ -0,0 +1,161 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
* PreProcessor for frequency encoding a set of categorical values for a given field.
*/
public class FrequencyEncoding implements PreProcessor {

public static final String NAME = "frequency_encoding";
public static final ParseField FIELD = new ParseField("field");
public static final ParseField FEATURE_NAME = new ParseField("feature_name");
public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map");

@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<FrequencyEncoding, Void> PARSER = new ConstructingObjectParser<>(
NAME,
true,
a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME);
PARSER.declareObject(ConstructingObjectParser.constructorArg(),
(p, c) -> p.map(HashMap::new, XContentParser::doubleValue),
FREQUENCY_MAP);
}

public static FrequencyEncoding fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}

private final String field;
private final String featureName;
private final Map<String, Double> frequencyMap;

public FrequencyEncoding(String field, String featureName, Map<String, Double> frequencyMap) {
this.field = Objects.requireNonNull(field);
this.featureName = Objects.requireNonNull(featureName);
this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap));
}

/**
* @return Field name on which to frequency encode
*/
public String getField() {
return field;
}

/**
* @return Map of Value: frequency for the frequency encoding
*/
public Map<String, Double> getFrequencyMap() {
return frequencyMap;
}

/**
* @return The encoded feature name
*/
public String getFeatureName() {
return featureName;
}

@Override
public String getName() {
return NAME;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(FIELD.getPreferredName(), field);
builder.field(FEATURE_NAME.getPreferredName(), featureName);
builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap);
builder.endObject();
return builder;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FrequencyEncoding that = (FrequencyEncoding) o;
return Objects.equals(field, that.field)
&& Objects.equals(featureName, that.featureName)
&& Objects.equals(frequencyMap, that.frequencyMap);
}

@Override
public int hashCode() {
return Objects.hash(field, featureName, frequencyMap);
}

public Builder builder(String field) {
return new Builder(field);
}

public static class Builder {

private String field;
private String featureName;
private Map<String, Double> frequencyMap = new HashMap<>();

public Builder(String field) {
this.field = field;
}

public Builder setField(String field) {
this.field = field;
return this;
}

public Builder setFeatureName(String featureName) {
this.featureName = featureName;
return this;
}

public Builder setFrequencyMap(Map<String, Double> frequencyMap) {
this.frequencyMap = new HashMap<>(frequencyMap);
return this;
}

public Builder addFrequency(String valueName, double frequency) {
this.frequencyMap.put(valueName, frequency);
return this;
}

public FrequencyEncoding build() {
return new FrequencyEncoding(field, featureName, frequencyMap);
}
}

}

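A hedged sketch of constructing the new preprocessor and serializing it; the field name and frequency values are made up, and XContentFactory and Strings from the Elasticsearch common libraries are assumed to be on the classpath:

import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class FrequencyEncodingDemo {
    public static void main(String[] args) throws IOException {
        // hypothetical frequencies for the categories of a "color" field
        Map<String, Double> frequencies = new HashMap<>();
        frequencies.put("red", 0.6);
        frequencies.put("blue", 0.4);
        FrequencyEncoding encoding = new FrequencyEncoding("color", "color_frequency", frequencies);

        XContentBuilder builder = XContentFactory.jsonBuilder();
        encoding.toXContent(builder, ToXContent.EMPTY_PARAMS);
        // e.g. {"field":"color","feature_name":"color_frequency","frequency_map":{"red":0.6,"blue":0.4}} (key order may vary)
        System.out.println(Strings.toString(builder));
    }
}
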
@@ -0,0 +1,138 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.preprocessing;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
* PreProcessor for one hot encoding a set of categorical values for a given field.
*/
public class OneHotEncoding implements PreProcessor {

public static final String NAME = "one_hot_encoding";
public static final ParseField FIELD = new ParseField("field");
public static final ParseField HOT_MAP = new ParseField("hot_map");

@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<OneHotEncoding, Void> PARSER = new ConstructingObjectParser<>(
NAME,
true,
a -> new OneHotEncoding((String)a[0], (Map<String, String>)a[1]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP);
}

public static OneHotEncoding fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}

private final String field;
private final Map<String, String> hotMap;

public OneHotEncoding(String field, Map<String, String> hotMap) {
this.field = Objects.requireNonNull(field);
this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap));
}

/**
* @return Field name on which to one hot encode
*/
public String getField() {
return field;
}

/**
* @return Map of Value: ColumnName for the one hot encoding
*/
public Map<String, String> getHotMap() {
return hotMap;
}

@Override
public String getName() {
return NAME;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(FIELD.getPreferredName(), field);
builder.field(HOT_MAP.getPreferredName(), hotMap);
builder.endObject();
return builder;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
OneHotEncoding that = (OneHotEncoding) o;
return Objects.equals(field, that.field)
&& Objects.equals(hotMap, that.hotMap);
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(field, hotMap);
|
||||
}
|
||||
|
||||
public static Builder builder(String field) {
|
||||
return new Builder(field);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private String field;
|
||||
private Map<String, String> hotMap = new HashMap<>();
|
||||
|
||||
public Builder(String field) {
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
public Builder setField(String field) {
|
||||
this.field = field;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setHotMap(Map<String, String> hotMap) {
|
||||
this.hotMap = new HashMap<>(hotMap);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addOneHot(String valueName, String oneHotFeatureName) {
|
||||
this.hotMap.put(valueName, oneHotFeatureName);
|
||||
return this;
|
||||
}
|
||||
|
||||
public OneHotEncoding build() {
|
||||
return new OneHotEncoding(field, hotMap);
|
||||
}
|
||||
}
|
||||
}
|
|
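As with FrequencyEncoding, a short sketch of the OneHotEncoding builder; the categorical values and output column names below are made-up examples.

import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;

public class OneHotEncodingExample {
    public static void main(String[] args) {
        // Each categorical value of "animal" gets its own output column.
        OneHotEncoding encoding = new OneHotEncoding.Builder("animal")
            .addOneHot("cat", "animal_cat")
            .addOneHot("dog", "animal_dog")
            .build();

        // Serializes roughly as:
        // {"field":"animal","hot_map":{"cat":"animal_cat","dog":"animal_dog"}}
        System.out.println(encoding.getHotMap());
    }
}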
@@ -0,0 +1,33 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.preprocessing;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
|
||||
|
||||
/**
|
||||
* Describes a pre-processor for a defined machine learning model
|
||||
*/
|
||||
public interface PreProcessor extends ToXContentObject {
|
||||
|
||||
/**
|
||||
* @return The name of the pre-processor
|
||||
*/
|
||||
String getName();
|
||||
}
|
|
@@ -0,0 +1,183 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.preprocessing;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
|
||||
/**
|
||||
* PreProcessor for target mean encoding a set of categorical values for a given field.
|
||||
*/
|
||||
public class TargetMeanEncoding implements PreProcessor {
|
||||
|
||||
public static final String NAME = "target_mean_encoding";
|
||||
public static final ParseField FIELD = new ParseField("field");
|
||||
public static final ParseField FEATURE_NAME = new ParseField("feature_name");
|
||||
public static final ParseField TARGET_MEANS = new ParseField("target_means");
|
||||
public static final ParseField DEFAULT_VALUE = new ParseField("default_value");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<TargetMeanEncoding, Void> PARSER = new ConstructingObjectParser<>(
|
||||
NAME,
|
||||
true,
|
||||
a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2], (Double)a[3]));
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME);
|
||||
PARSER.declareObject(ConstructingObjectParser.constructorArg(),
|
||||
(p, c) -> p.map(HashMap::new, XContentParser::doubleValue),
|
||||
TARGET_MEANS);
|
||||
PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE);
|
||||
}
|
||||
|
||||
public static TargetMeanEncoding fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private final String field;
|
||||
private final String featureName;
|
||||
private final Map<String, Double> meanMap;
|
||||
private final double defaultValue;
|
||||
|
||||
public TargetMeanEncoding(String field, String featureName, Map<String, Double> meanMap, Double defaultValue) {
|
||||
this.field = Objects.requireNonNull(field);
|
||||
this.featureName = Objects.requireNonNull(featureName);
|
||||
this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap));
|
||||
this.defaultValue = Objects.requireNonNull(defaultValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Field name on which to target mean encode
|
||||
*/
|
||||
public String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Map of Value: targetMean for the target mean encoding
|
||||
*/
|
||||
public Map<String, Double> getMeanMap() {
|
||||
return meanMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The default value to set when a previously unobserved value is seen
|
||||
*/
|
||||
public double getDefaultValue() {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The feature name for the encoded value
|
||||
*/
|
||||
public String getFeatureName() {
|
||||
return featureName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FIELD.getPreferredName(), field);
|
||||
builder.field(FEATURE_NAME.getPreferredName(), featureName);
|
||||
builder.field(TARGET_MEANS.getPreferredName(), meanMap);
|
||||
builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
TargetMeanEncoding that = (TargetMeanEncoding) o;
|
||||
return Objects.equals(field, that.field)
|
||||
&& Objects.equals(featureName, that.featureName)
|
||||
&& Objects.equals(meanMap, that.meanMap)
|
||||
&& Objects.equals(defaultValue, that.defaultValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(field, featureName, meanMap, defaultValue);
|
||||
}
|
||||
|
||||
public static Builder builder(String field) {
|
||||
return new Builder(field);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private String field;
|
||||
private String featureName;
|
||||
private Map<String, Double> meanMap = new HashMap<>();
|
||||
private double defaultValue;
|
||||
|
||||
public Builder(String field) {
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
public String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
public Builder setField(String field) {
|
||||
this.field = field;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setFeatureName(String featureName) {
|
||||
this.featureName = featureName;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setMeanMap(Map<String, Double> meanMap) {
|
||||
this.meanMap = meanMap;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addMeanMapEntry(String valueName, double meanEncoding) {
|
||||
this.meanMap.put(valueName, meanEncoding);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setDefaultValue(double defaultValue) {
|
||||
this.defaultValue = defaultValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TargetMeanEncoding build() {
|
||||
return new TargetMeanEncoding(field, featureName, meanMap, defaultValue);
|
||||
}
|
||||
}
|
||||
}
|
|
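A corresponding sketch for TargetMeanEncoding; the field, feature name, and mean values are placeholders. The default value is what the encoding falls back to for categories missing from the map.

import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;

public class TargetMeanEncodingExample {
    public static void main(String[] args) {
        TargetMeanEncoding encoding = new TargetMeanEncoding.Builder("airline")
            .setFeatureName("airline_target_mean")
            .addMeanMapEntry("AAL", 12.5)
            .addMeanMapEntry("DAL", 9.75)
            .setDefaultValue(10.0)
            .build();

        // Serializes roughly as:
        // {"field":"airline","feature_name":"airline_target_mean",
        //  "target_means":{"AAL":12.5,"DAL":9.75},"default_value":10.0}
        System.out.println(encoding.getDefaultValue());
    }
}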
@@ -16,26 +16,21 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.trainedmodel;
|
||||
|
||||
package org.elasticsearch.search;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import java.util.List;
|
||||
|
||||
import java.io.IOException;
|
||||
public interface TrainedModel extends ToXContentObject {
|
||||
|
||||
public class SearchContextException extends SearchException {
|
||||
|
||||
public SearchContextException(SearchContext context, String msg) {
|
||||
super(context.shardTarget(), msg);
|
||||
}
|
||||
|
||||
public SearchContextException(SearchContext context, String msg, Throwable t) {
|
||||
super(context.shardTarget(), msg, t);
|
||||
}
|
||||
|
||||
public SearchContextException(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
}
|
||||
/**
|
||||
* @return List of featureNames expected by the model, in the order that they are expected
|
||||
*/
|
||||
List<String> getFeatureNames();
|
||||
|
||||
/**
|
||||
* @return The name of the model
|
||||
*/
|
||||
String getName();
|
||||
}
|
|
@@ -0,0 +1,192 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
|
||||
|
||||
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class Tree implements TrainedModel {
|
||||
|
||||
public static final String NAME = "tree";
|
||||
|
||||
public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
|
||||
public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");
|
||||
|
||||
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME, true, Builder::new);
|
||||
|
||||
static {
|
||||
PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES);
|
||||
PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE);
|
||||
}
|
||||
|
||||
public static Tree fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null).build();
|
||||
}
|
||||
|
||||
private final List<String> featureNames;
|
||||
private final List<TreeNode> nodes;
|
||||
|
||||
Tree(List<String> featureNames, List<TreeNode> nodes) {
|
||||
this.featureNames = Collections.unmodifiableList(Objects.requireNonNull(featureNames));
|
||||
this.nodes = Collections.unmodifiableList(Objects.requireNonNull(nodes));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getFeatureNames() {
|
||||
return featureNames;
|
||||
}
|
||||
|
||||
public List<TreeNode> getNodes() {
|
||||
return nodes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
|
||||
builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Tree that = (Tree) o;
|
||||
return Objects.equals(featureNames, that.featureNames)
|
||||
&& Objects.equals(nodes, that.nodes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(featureNames, nodes);
|
||||
}
|
||||
|
||||
public static Builder builder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private List<String> featureNames;
|
||||
private ArrayList<TreeNode.Builder> nodes;
|
||||
private int numNodes;
|
||||
|
||||
public Builder() {
|
||||
nodes = new ArrayList<>();
|
||||
// allocate space in the root node and set to a leaf
|
||||
nodes.add(null);
|
||||
addLeaf(0, 0.0);
|
||||
numNodes = 1;
|
||||
}
|
||||
|
||||
public Builder setFeatureNames(List<String> featureNames) {
|
||||
this.featureNames = featureNames;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addNode(TreeNode.Builder node) {
|
||||
nodes.add(node);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setNodes(List<TreeNode.Builder> nodes) {
|
||||
this.nodes = new ArrayList<>(nodes);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setNodes(TreeNode.Builder... nodes) {
|
||||
return setNodes(Arrays.asList(nodes));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a decision node. Space for the child nodes is allocated
|
||||
* @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index
|
||||
* @param featureIndex The feature index the decision is made on
|
||||
* @param isDefaultLeft Whether to take the left branch by default if the feature value is missing
|
||||
* @param decisionThreshold The decision threshold
|
||||
* @return The created node
|
||||
*/
|
||||
public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) {
|
||||
int leftChild = numNodes++;
|
||||
int rightChild = numNodes++;
|
||||
nodes.ensureCapacity(nodeIndex + 1);
|
||||
for (int i = nodes.size(); i < nodeIndex + 1; i++) {
|
||||
nodes.add(null);
|
||||
}
|
||||
|
||||
TreeNode.Builder node = TreeNode.builder(nodeIndex)
|
||||
.setDefaultLeft(isDefaultLeft)
|
||||
.setLeftChild(leftChild)
|
||||
.setRightChild(rightChild)
|
||||
.setSplitFeature(featureIndex)
|
||||
.setThreshold(decisionThreshold);
|
||||
nodes.set(nodeIndex, node);
|
||||
|
||||
// allocate space for the child nodes
|
||||
while (nodes.size() <= rightChild) {
|
||||
nodes.add(null);
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the node at {@code nodeIndex} to a leaf node.
|
||||
* @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)}
|
||||
* @param value The prediction value
|
||||
* @return this
|
||||
*/
|
||||
public Builder addLeaf(int nodeIndex, double value) {
|
||||
for (int i = nodes.size(); i < nodeIndex + 1; i++) {
|
||||
nodes.add(null);
|
||||
}
|
||||
nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Tree build() {
|
||||
return new Tree(featureNames,
|
||||
nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
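The Tree.Builder manages node indices itself: addJunction(...) reserves two new indices for the children of the node it creates, and addLeaf(...) fills one of those reserved slots. A small sketch with made-up feature names and values:

import java.util.Arrays;

import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;

public class TreeExample {
    public static void main(String[] args) {
        Tree.Builder builder = Tree.builder()
            .setFeatureNames(Arrays.asList("feature_a", "feature_b"));

        // Root node 0 splits on feature index 0 (feature_a) at threshold 0.5;
        // addJunction allocates child indices 1 (left) and 2 (right).
        builder.addJunction(0, 0, true, 0.5);
        builder.addLeaf(1, 1.0);   // left-branch prediction
        builder.addLeaf(2, 2.0);   // right-branch prediction

        Tree tree = builder.build();
        System.out.println(tree.getNodes().size());   // 3
    }
}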
@@ -0,0 +1,280 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
|
||||
|
||||
import org.elasticsearch.client.ml.job.config.Operator;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class TreeNode implements ToXContentObject {
|
||||
|
||||
public static final String NAME = "tree_node";
|
||||
|
||||
public static final ParseField DECISION_TYPE = new ParseField("decision_type");
|
||||
public static final ParseField THRESHOLD = new ParseField("threshold");
|
||||
public static final ParseField LEFT_CHILD = new ParseField("left_child");
|
||||
public static final ParseField RIGHT_CHILD = new ParseField("right_child");
|
||||
public static final ParseField DEFAULT_LEFT = new ParseField("default_left");
|
||||
public static final ParseField SPLIT_FEATURE = new ParseField("split_feature");
|
||||
public static final ParseField NODE_INDEX = new ParseField("node_index");
|
||||
public static final ParseField SPLIT_GAIN = new ParseField("split_gain");
|
||||
public static final ParseField LEAF_VALUE = new ParseField("leaf_value");
|
||||
|
||||
|
||||
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(
|
||||
NAME,
|
||||
true,
|
||||
Builder::new);
|
||||
static {
|
||||
PARSER.declareDouble(Builder::setThreshold, THRESHOLD);
|
||||
PARSER.declareField(Builder::setOperator,
|
||||
p -> Operator.fromString(p.text()),
|
||||
DECISION_TYPE,
|
||||
ObjectParser.ValueType.STRING);
|
||||
PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD);
|
||||
PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD);
|
||||
PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT);
|
||||
PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE);
|
||||
PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX);
|
||||
PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN);
|
||||
PARSER.declareDouble(Builder::setLeafValue, LEAF_VALUE);
|
||||
}
|
||||
|
||||
public static Builder fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private final Operator operator;
|
||||
private final Double threshold;
|
||||
private final Integer splitFeature;
|
||||
private final int nodeIndex;
|
||||
private final Double splitGain;
|
||||
private final Double leafValue;
|
||||
private final Boolean defaultLeft;
|
||||
private final Integer leftChild;
|
||||
private final Integer rightChild;
|
||||
|
||||
|
||||
TreeNode(Operator operator,
|
||||
Double threshold,
|
||||
Integer splitFeature,
|
||||
int nodeIndex,
|
||||
Double splitGain,
|
||||
Double leafValue,
|
||||
Boolean defaultLeft,
|
||||
Integer leftChild,
|
||||
Integer rightChild) {
|
||||
this.operator = operator;
|
||||
this.threshold = threshold;
|
||||
this.splitFeature = splitFeature;
|
||||
this.nodeIndex = nodeIndex;
|
||||
this.splitGain = splitGain;
|
||||
this.leafValue = leafValue;
|
||||
this.defaultLeft = defaultLeft;
|
||||
this.leftChild = leftChild;
|
||||
this.rightChild = rightChild;
|
||||
}
|
||||
|
||||
public Operator getOperator() {
|
||||
return operator;
|
||||
}
|
||||
|
||||
public Double getThreshold() {
|
||||
return threshold;
|
||||
}
|
||||
|
||||
public Integer getSplitFeature() {
|
||||
return splitFeature;
|
||||
}
|
||||
|
||||
public Integer getNodeIndex() {
|
||||
return nodeIndex;
|
||||
}
|
||||
|
||||
public Double getSplitGain() {
|
||||
return splitGain;
|
||||
}
|
||||
|
||||
public Double getLeafValue() {
|
||||
return leafValue;
|
||||
}
|
||||
|
||||
public Boolean isDefaultLeft() {
|
||||
return defaultLeft;
|
||||
}
|
||||
|
||||
public Integer getLeftChild() {
|
||||
return leftChild;
|
||||
}
|
||||
|
||||
public Integer getRightChild() {
|
||||
return rightChild;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
addOptionalField(builder, DECISION_TYPE, operator);
|
||||
addOptionalField(builder, THRESHOLD, threshold);
|
||||
addOptionalField(builder, SPLIT_FEATURE, splitFeature);
|
||||
addOptionalField(builder, SPLIT_GAIN, splitGain);
|
||||
addOptionalField(builder, NODE_INDEX, nodeIndex);
|
||||
addOptionalField(builder, LEAF_VALUE, leafValue);
|
||||
addOptionalField(builder, DEFAULT_LEFT, defaultLeft);
|
||||
addOptionalField(builder, LEFT_CHILD, leftChild);
|
||||
addOptionalField(builder, RIGHT_CHILD, rightChild);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException {
|
||||
if (value != null) {
|
||||
builder.field(field.getPreferredName(), value);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
TreeNode that = (TreeNode) o;
|
||||
return Objects.equals(operator, that.operator)
|
||||
&& Objects.equals(threshold, that.threshold)
|
||||
&& Objects.equals(splitFeature, that.splitFeature)
|
||||
&& Objects.equals(nodeIndex, that.nodeIndex)
|
||||
&& Objects.equals(splitGain, that.splitGain)
|
||||
&& Objects.equals(leafValue, that.leafValue)
|
||||
&& Objects.equals(defaultLeft, that.defaultLeft)
|
||||
&& Objects.equals(leftChild, that.leftChild)
|
||||
&& Objects.equals(rightChild, that.rightChild);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(operator,
|
||||
threshold,
|
||||
splitFeature,
|
||||
splitGain,
|
||||
nodeIndex,
|
||||
leafValue,
|
||||
defaultLeft,
|
||||
leftChild,
|
||||
rightChild);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
|
||||
public static Builder builder(int nodeIndex) {
|
||||
return new Builder(nodeIndex);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private Operator operator;
|
||||
private Double threshold;
|
||||
private Integer splitFeature;
|
||||
private int nodeIndex;
|
||||
private Double splitGain;
|
||||
private Double leafValue;
|
||||
private Boolean defaultLeft;
|
||||
private Integer leftChild;
|
||||
private Integer rightChild;
|
||||
|
||||
public Builder(int nodeIndex) {
|
||||
this.nodeIndex = nodeIndex;
|
||||
}
|
||||
|
||||
private Builder() {
|
||||
}
|
||||
|
||||
public Builder setOperator(Operator operator) {
|
||||
this.operator = operator;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setThreshold(Double threshold) {
|
||||
this.threshold = threshold;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setSplitFeature(Integer splitFeature) {
|
||||
this.splitFeature = splitFeature;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setNodeIndex(int nodeIndex) {
|
||||
this.nodeIndex = nodeIndex;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setSplitGain(Double splitGain) {
|
||||
this.splitGain = splitGain;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setLeafValue(Double leafValue) {
|
||||
this.leafValue = leafValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setDefaultLeft(Boolean defaultLeft) {
|
||||
this.defaultLeft = defaultLeft;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setLeftChild(Integer leftChild) {
|
||||
this.leftChild = leftChild;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Integer getLeftChild() {
|
||||
return leftChild;
|
||||
}
|
||||
|
||||
public Builder setRightChild(Integer rightChild) {
|
||||
this.rightChild = rightChild;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Integer getRightChild() {
|
||||
return rightChild;
|
||||
}
|
||||
|
||||
public TreeNode build() {
|
||||
return new TreeNode(operator,
|
||||
threshold,
|
||||
splitFeature,
|
||||
nodeIndex,
|
||||
splitGain,
|
||||
leafValue,
|
||||
defaultLeft,
|
||||
leftChild,
|
||||
rightChild);
|
||||
}
|
||||
}
|
||||
}
|
|
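At the node level only the index is required; split nodes and leaf nodes simply populate different subsets of the optional fields. A brief sketch with illustrative values:

import org.elasticsearch.client.ml.inference.trainedmodel.tree.TreeNode;

public class TreeNodeExample {
    public static void main(String[] args) {
        // A split node referencing its two children by index.
        TreeNode split = TreeNode.builder(0)
            .setSplitFeature(0)
            .setThreshold(0.5)
            .setDefaultLeft(true)
            .setLeftChild(1)
            .setRightChild(2)
            .build();

        // A leaf node only carries the prediction value.
        TreeNode leaf = TreeNode.builder(1)
            .setLeafValue(1.0)
            .build();

        System.out.println(split.getLeftChild() + " / " + leaf.getLeafValue());
    }
}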
@@ -74,7 +74,7 @@ public class SnapshotLifecycleStats implements ToXContentObject {
|
|||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS);
|
||||
PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS);
|
||||
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS);
|
||||
}
|
||||
|
||||
// Package visible for testing
|
||||
|
@@ -178,22 +178,25 @@ public class SnapshotLifecycleStats implements ToXContentObject {
|
|||
private final long snapshotsDeleted;
|
||||
private final long snapshotDeleteFailures;
|
||||
|
||||
public static final ParseField POLICY_ID = new ParseField("policy");
|
||||
static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken");
|
||||
static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed");
|
||||
static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted");
|
||||
static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures");
|
||||
|
||||
private static final ConstructingObjectParser<SnapshotPolicyStats, String> PARSER =
|
||||
private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER =
|
||||
new ConstructingObjectParser<>("snapshot_policy_stats", true,
|
||||
(a, id) -> {
|
||||
long taken = (long) a[0];
|
||||
long failed = (long) a[1];
|
||||
long deleted = (long) a[2];
|
||||
long deleteFailed = (long) a[3];
|
||||
a -> {
|
||||
String id = (String) a[0];
|
||||
long taken = (long) a[1];
|
||||
long failed = (long) a[2];
|
||||
long deleted = (long) a[3];
|
||||
long deleteFailed = (long) a[4];
|
||||
return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed);
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED);
|
||||
|
@@ -209,7 +212,11 @@ public class SnapshotLifecycleStats implements ToXContentObject {
|
|||
}
|
||||
|
||||
public static SnapshotPolicyStats parse(XContentParser parser, String policyId) {
|
||||
return PARSER.apply(parser, policyId);
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
public String getPolicyId() {
|
||||
return policyId;
|
||||
}
|
||||
|
||||
public long getSnapshotsTaken() {
|
||||
|
|
|
@@ -27,16 +27,16 @@ import java.util.Optional;
|
|||
|
||||
|
||||
/**
|
||||
* Request to delete a data frame transform
|
||||
* Request to delete a transform
|
||||
*/
|
||||
public class DeleteDataFrameTransformRequest implements Validatable {
|
||||
public class DeleteTransformRequest implements Validatable {
|
||||
|
||||
public static final String FORCE = "force";
|
||||
|
||||
private final String id;
|
||||
private Boolean force;
|
||||
|
||||
public DeleteDataFrameTransformRequest(String id) {
|
||||
public DeleteTransformRequest(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
|
@@ -56,7 +56,7 @@ public class DeleteDataFrameTransformRequest implements Validatable {
|
|||
public Optional<ValidationException> validate() {
|
||||
if (id == null) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationError("data frame transform id must not be null");
|
||||
validationException.addValidationError("transform id must not be null");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
return Optional.empty();
|
||||
|
@@ -77,7 +77,7 @@ public class DeleteDataFrameTransformRequest implements Validatable {
|
|||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
DeleteDataFrameTransformRequest other = (DeleteDataFrameTransformRequest) obj;
|
||||
DeleteTransformRequest other = (DeleteTransformRequest) obj;
|
||||
return Objects.equals(id, other.id) && Objects.equals(force, other.force);
|
||||
}
|
||||
}
|
|
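Usage of the renamed request class is unchanged apart from the class name; a minimal sketch with a placeholder transform id:

import org.elasticsearch.client.transform.DeleteTransformRequest;

public class DeleteTransformRequestExample {
    public static void main(String[] args) {
        DeleteTransformRequest request = new DeleteTransformRequest("my-transform");
        // validate() only reports an error when the id is null.
        System.out.println(request.validate().isPresent());   // false
    }
}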
@@ -28,22 +28,22 @@ import java.util.List;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class GetDataFrameTransformRequest implements Validatable {
|
||||
public class GetTransformRequest implements Validatable {
|
||||
|
||||
public static final String ALLOW_NO_MATCH = "allow_no_match";
|
||||
/**
|
||||
* Helper method to create a request that will get ALL Data Frame Transforms
|
||||
* @return new {@link GetDataFrameTransformRequest} object for the id "_all"
|
||||
* Helper method to create a request that will get ALL Transforms
|
||||
* @return new {@link GetTransformRequest} object for the id "_all"
|
||||
*/
|
||||
public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() {
|
||||
return new GetDataFrameTransformRequest("_all");
|
||||
public static GetTransformRequest getAllTransformRequest() {
|
||||
return new GetTransformRequest("_all");
|
||||
}
|
||||
|
||||
private final List<String> ids;
|
||||
private PageParams pageParams;
|
||||
private Boolean allowNoMatch;
|
||||
|
||||
public GetDataFrameTransformRequest(String... ids) {
|
||||
public GetTransformRequest(String... ids) {
|
||||
this.ids = Arrays.asList(ids);
|
||||
}
|
||||
|
||||
|
@@ -71,7 +71,7 @@ public class GetDataFrameTransformRequest implements Validatable {
|
|||
public Optional<ValidationException> validate() {
|
||||
if (ids == null || ids.isEmpty()) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationError("data frame transform id must not be null");
|
||||
validationException.addValidationError("transform id must not be null");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
return Optional.empty();
|
||||
|
@@ -92,7 +92,7 @@ public class GetDataFrameTransformRequest implements Validatable {
|
|||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj;
|
||||
GetTransformRequest other = (GetTransformRequest) obj;
|
||||
return Objects.equals(ids, other.ids)
|
||||
&& Objects.equals(pageParams, other.pageParams)
|
||||
&& Objects.equals(allowNoMatch, other.allowNoMatch);
|
|
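The same pattern applies to GetTransformRequest; the ids below are placeholders, and the "_all" expansion comes from the helper shown above:

import org.elasticsearch.client.transform.GetTransformRequest;

public class GetTransformRequestExample {
    public static void main(String[] args) {
        // Fetch every transform.
        GetTransformRequest all = GetTransformRequest.getAllTransformRequest();

        // Or fetch specific transforms by id.
        GetTransformRequest some = new GetTransformRequest("transform-1", "transform-2");

        System.out.println(all.validate().isPresent());   // false
    }
}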
@@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
|
@@ -31,7 +31,7 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class GetDataFrameTransformResponse {
|
||||
public class GetTransformResponse {
|
||||
|
||||
public static final ParseField TRANSFORMS = new ParseField("transforms");
|
||||
public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
|
||||
|
@@ -42,30 +42,30 @@ public class GetDataFrameTransformResponse {
|
|||
new ConstructingObjectParser<>("invalid_transforms", true, args -> new InvalidTransforms((List<String>) args[0]));
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<GetDataFrameTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"get_data_frame_transform", true, args -> new GetDataFrameTransformResponse(
|
||||
(List<DataFrameTransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2]));
|
||||
static final ConstructingObjectParser<GetTransformResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"get_transform", true, args -> new GetTransformResponse(
|
||||
(List<TransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2]));
|
||||
static {
|
||||
// Discard the count field which is the size of the transforms array
|
||||
INVALID_TRANSFORMS_PARSER.declareInt((a, b) -> {}, COUNT);
|
||||
INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS);
|
||||
|
||||
PARSER.declareObjectArray(constructorArg(), DataFrameTransformConfig.PARSER::apply, TRANSFORMS);
|
||||
PARSER.declareObjectArray(constructorArg(), TransformConfig.PARSER::apply, TRANSFORMS);
|
||||
PARSER.declareInt(constructorArg(), COUNT);
|
||||
PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS);
|
||||
}
|
||||
|
||||
public static GetDataFrameTransformResponse fromXContent(final XContentParser parser) {
|
||||
return GetDataFrameTransformResponse.PARSER.apply(parser, null);
|
||||
public static GetTransformResponse fromXContent(final XContentParser parser) {
|
||||
return GetTransformResponse.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private List<DataFrameTransformConfig> transformConfigurations;
|
||||
private List<TransformConfig> transformConfigurations;
|
||||
private int count;
|
||||
private InvalidTransforms invalidTransforms;
|
||||
|
||||
public GetDataFrameTransformResponse(List<DataFrameTransformConfig> transformConfigurations,
|
||||
int count,
|
||||
@Nullable InvalidTransforms invalidTransforms) {
|
||||
public GetTransformResponse(List<TransformConfig> transformConfigurations,
|
||||
int count,
|
||||
@Nullable InvalidTransforms invalidTransforms) {
|
||||
this.transformConfigurations = transformConfigurations;
|
||||
this.count = count;
|
||||
this.invalidTransforms = invalidTransforms;
|
||||
|
@@ -80,7 +80,7 @@ public class GetDataFrameTransformResponse {
|
|||
return count;
|
||||
}
|
||||
|
||||
public List<DataFrameTransformConfig> getTransformConfigurations() {
|
||||
public List<TransformConfig> getTransformConfigurations() {
|
||||
return transformConfigurations;
|
||||
}
|
||||
|
||||
|
@@ -99,7 +99,7 @@ public class GetDataFrameTransformResponse {
|
|||
return false;
|
||||
}
|
||||
|
||||
final GetDataFrameTransformResponse that = (GetDataFrameTransformResponse) other;
|
||||
final GetTransformResponse that = (GetTransformResponse) other;
|
||||
return Objects.equals(this.transformConfigurations, that.transformConfigurations)
|
||||
&& Objects.equals(this.count, that.count)
|
||||
&& Objects.equals(this.invalidTransforms, that.invalidTransforms);
|
|
@@ -26,12 +26,12 @@ import org.elasticsearch.client.core.PageParams;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class GetDataFrameTransformStatsRequest implements Validatable {
|
||||
public class GetTransformStatsRequest implements Validatable {
|
||||
private final String id;
|
||||
private PageParams pageParams;
|
||||
private Boolean allowNoMatch;
|
||||
|
||||
public GetDataFrameTransformStatsRequest(String id) {
|
||||
public GetTransformStatsRequest(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
|
@@ -59,7 +59,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
|
|||
public Optional<ValidationException> validate() {
|
||||
if (id == null) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationError("data frame transform id must not be null");
|
||||
validationException.addValidationError("transform id must not be null");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
return Optional.empty();
|
||||
|
@@ -80,7 +80,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable {
|
|||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj;
|
||||
GetTransformStatsRequest other = (GetTransformStatsRequest) obj;
|
||||
return Objects.equals(id, other.id)
|
||||
&& Objects.equals(pageParams, other.pageParams)
|
||||
&& Objects.equals(allowNoMatch, other.allowNoMatch);
|
|
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
|
|||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.TransformStats;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
|
@@ -34,19 +34,19 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class GetDataFrameTransformStatsResponse {
|
||||
public class GetTransformStatsResponse {
|
||||
|
||||
public static final ParseField TRANSFORMS = new ParseField("transforms");
|
||||
public static final ParseField COUNT = new ParseField("count");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<GetDataFrameTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"get_data_frame_transform_stats_response", true,
|
||||
args -> new GetDataFrameTransformStatsResponse((List<DataFrameTransformStats>) args[0],
|
||||
static final ConstructingObjectParser<GetTransformStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"get_transform_stats_response", true,
|
||||
args -> new GetTransformStatsResponse((List<TransformStats>) args[0],
|
||||
(List<TaskOperationFailure>) args[1], (List<ElasticsearchException>) args[2]));
|
||||
|
||||
static {
|
||||
PARSER.declareObjectArray(constructorArg(), DataFrameTransformStats.PARSER::apply, TRANSFORMS);
|
||||
PARSER.declareObjectArray(constructorArg(), TransformStats.PARSER::apply, TRANSFORMS);
|
||||
// Discard the count field which is the size of the transforms array
|
||||
PARSER.declareInt((a, b) -> {}, COUNT);
|
||||
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p),
|
||||
|
@@ -55,15 +55,15 @@ public class GetDataFrameTransformStatsResponse {
|
|||
AcknowledgedTasksResponse.NODE_FAILURES);
|
||||
}
|
||||
|
||||
public static GetDataFrameTransformStatsResponse fromXContent(final XContentParser parser) {
|
||||
return GetDataFrameTransformStatsResponse.PARSER.apply(parser, null);
|
||||
public static GetTransformStatsResponse fromXContent(final XContentParser parser) {
|
||||
return GetTransformStatsResponse.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
private final List<DataFrameTransformStats> transformsStats;
|
||||
private final List<TransformStats> transformsStats;
|
||||
private final List<TaskOperationFailure> taskFailures;
|
||||
private final List<ElasticsearchException> nodeFailures;
|
||||
|
||||
public GetDataFrameTransformStatsResponse(List<DataFrameTransformStats> transformsStats,
|
||||
public GetTransformStatsResponse(List<TransformStats> transformsStats,
|
||||
@Nullable List<TaskOperationFailure> taskFailures,
|
||||
@Nullable List<? extends ElasticsearchException> nodeFailures) {
|
||||
this.transformsStats = transformsStats;
|
||||
|
@@ -71,7 +71,7 @@ public class GetDataFrameTransformStatsResponse {
|
|||
this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
|
||||
}
|
||||
|
||||
public List<DataFrameTransformStats> getTransformsStats() {
|
||||
public List<TransformStats> getTransformsStats() {
|
||||
return transformsStats;
|
||||
}
|
||||
|
||||
|
@@ -98,7 +98,7 @@ public class GetDataFrameTransformStatsResponse {
|
|||
return false;
|
||||
}
|
||||
|
||||
final GetDataFrameTransformStatsResponse that = (GetDataFrameTransformStatsResponse) other;
|
||||
final GetTransformStatsResponse that = (GetTransformStatsResponse) other;
|
||||
return Objects.equals(this.transformsStats, that.transformsStats)
|
||||
&& Objects.equals(this.nodeFailures, that.nodeFailures)
|
||||
&& Objects.equals(this.taskFailures, that.taskFailures);
|
|
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
|
|||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@@ -30,15 +30,15 @@ import java.io.IOException;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class PreviewDataFrameTransformRequest implements ToXContentObject, Validatable {
|
||||
public class PreviewTransformRequest implements ToXContentObject, Validatable {
|
||||
|
||||
private final DataFrameTransformConfig config;
|
||||
private final TransformConfig config;
|
||||
|
||||
public PreviewDataFrameTransformRequest(DataFrameTransformConfig config) {
|
||||
public PreviewTransformRequest(TransformConfig config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfig getConfig() {
|
||||
public TransformConfig getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
|
@@ -51,11 +51,11 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
|
|||
public Optional<ValidationException> validate() {
|
||||
ValidationException validationException = new ValidationException();
|
||||
if (config == null) {
|
||||
validationException.addValidationError("preview requires a non-null data frame config");
|
||||
validationException.addValidationError("preview requires a non-null transform config");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
if (config.getSource() == null) {
|
||||
validationException.addValidationError("data frame transform source cannot be null");
|
||||
validationException.addValidationError("transform source cannot be null");
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -79,7 +79,7 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
|
|||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
PreviewDataFrameTransformRequest other = (PreviewDataFrameTransformRequest) obj;
|
||||
PreviewTransformRequest other = (PreviewTransformRequest) obj;
|
||||
return Objects.equals(config, other.config);
|
||||
}
|
||||
}
|
|
@@ -26,23 +26,23 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class PreviewDataFrameTransformResponse {
|
||||
public class PreviewTransformResponse {
|
||||
|
||||
private static final String PREVIEW = "preview";
|
||||
private static final String MAPPINGS = "mappings";
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static PreviewDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
Map<String, Object> previewMap = parser.mapOrdered();
|
||||
Object previewDocs = previewMap.get(PREVIEW);
|
||||
Object mappings = previewMap.get(MAPPINGS);
|
||||
return new PreviewDataFrameTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
|
||||
return new PreviewTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
|
||||
}
|
||||
|
||||
private List<Map<String, Object>> docs;
|
||||
private Map<String, Object> mappings;
|
||||
|
||||
public PreviewDataFrameTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
|
||||
public PreviewTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
|
||||
this.docs = docs;
|
||||
this.mappings = mappings;
|
||||
}
|
||||
|
@@ -65,7 +65,7 @@ public class PreviewDataFrameTransformResponse {
|
|||
return false;
|
||||
}
|
||||
|
||||
PreviewDataFrameTransformResponse other = (PreviewDataFrameTransformResponse) obj;
|
||||
PreviewTransformResponse other = (PreviewTransformResponse) obj;
|
||||
return Objects.equals(other.docs, docs) && Objects.equals(other.mappings, mappings);
|
||||
}
|
||||
|
|
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
|
|||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
|
@@ -29,17 +29,17 @@ import java.io.IOException;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class PutDataFrameTransformRequest implements ToXContentObject, Validatable {
|
||||
public class PutTransformRequest implements ToXContentObject, Validatable {
|
||||
|
||||
public static final String DEFER_VALIDATION = "defer_validation";
|
||||
private final DataFrameTransformConfig config;
|
||||
private final TransformConfig config;
|
||||
private Boolean deferValidation;
|
||||
|
||||
public PutDataFrameTransformRequest(DataFrameTransformConfig config) {
|
||||
public PutTransformRequest(TransformConfig config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfig getConfig() {
|
||||
public TransformConfig getConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
|
@@ -60,17 +60,17 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
|
|||
public Optional<ValidationException> validate() {
|
||||
ValidationException validationException = new ValidationException();
|
||||
if (config == null) {
|
||||
validationException.addValidationError("put requires a non-null data frame config");
|
||||
validationException.addValidationError("put requires a non-null transform config");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
if (config.getId() == null) {
|
||||
validationException.addValidationError("data frame transform id cannot be null");
|
||||
validationException.addValidationError("transform id cannot be null");
|
||||
}
|
||||
if (config.getSource() == null) {
|
||||
validationException.addValidationError("data frame transform source cannot be null");
|
||||
validationException.addValidationError("transform source cannot be null");
|
||||
}
|
||||
if (config.getDestination() == null) {
|
||||
validationException.addValidationError("data frame transform destination cannot be null");
|
||||
validationException.addValidationError("transform destination cannot be null");
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -99,7 +99,7 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
|
|||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
PutDataFrameTransformRequest other = (PutDataFrameTransformRequest) obj;
|
||||
PutTransformRequest other = (PutTransformRequest) obj;
|
||||
return Objects.equals(config, other.config);
|
||||
}
|
||||
}
|
|
@@ -26,16 +26,16 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class StartDataFrameTransformRequest implements Validatable {
|
||||
public class StartTransformRequest implements Validatable {
|
||||
|
||||
private final String id;
|
||||
private TimeValue timeout;
|
||||
|
||||
public StartDataFrameTransformRequest(String id) {
|
||||
public StartTransformRequest(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public StartDataFrameTransformRequest(String id, TimeValue timeout) {
|
||||
public StartTransformRequest(String id, TimeValue timeout) {
|
||||
this.id = id;
|
||||
this.timeout = timeout;
|
||||
}
|
||||
|
@@ -56,7 +56,7 @@ public class StartDataFrameTransformRequest implements Validatable {
|
|||
public Optional<ValidationException> validate() {
|
||||
if (id == null) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationError("data frame transform id must not be null");
|
||||
validationException.addValidationError("transform id must not be null");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
return Optional.empty();
|
||||
|
@@ -77,7 +77,7 @@ public class StartDataFrameTransformRequest implements Validatable {
|
|||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
StartDataFrameTransformRequest other = (StartDataFrameTransformRequest) obj;
|
||||
StartTransformRequest other = (StartTransformRequest) obj;
|
||||
return Objects.equals(this.id, other.id)
|
||||
&& Objects.equals(this.timeout, other.timeout);
|
||||
}
|
|
@@ -28,20 +28,20 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse {
|
||||
public class StartTransformResponse extends AcknowledgedTasksResponse {
|
||||
|
||||
private static final String ACKNOWLEDGED = "acknowledged";
|
||||
|
||||
private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER =
|
||||
AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new,
|
||||
private static final ConstructingObjectParser<StartTransformResponse, Void> PARSER =
|
||||
AcknowledgedTasksResponse.generateParser("start_transform_response", StartTransformResponse::new,
|
||||
ACKNOWLEDGED);
|
||||
|
||||
public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
public static StartTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
|
||||
@Nullable List<? extends ElasticsearchException> nodeFailures) {
|
||||
public StartTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
|
||||
@Nullable List<? extends ElasticsearchException> nodeFailures) {
|
||||
super(acknowledged, taskFailures, nodeFailures);
|
||||
}
|
||||
|
|
@@ -26,20 +26,20 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class StopDataFrameTransformRequest implements Validatable {
|
||||
public class StopTransformRequest implements Validatable {
|
||||
|
||||
private final String id;
|
||||
private Boolean waitForCompletion;
|
||||
private TimeValue timeout;
|
||||
private Boolean allowNoMatch;
|
||||
|
||||
public StopDataFrameTransformRequest(String id) {
|
||||
public StopTransformRequest(String id) {
|
||||
this.id = id;
|
||||
waitForCompletion = null;
|
||||
timeout = null;
|
||||
}
|
||||
|
||||
public StopDataFrameTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
|
||||
public StopTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
|
||||
this.id = id;
|
||||
this.waitForCompletion = waitForCompletion;
|
||||
this.timeout = timeout;
|
||||
|
@ -77,7 +77,7 @@ public class StopDataFrameTransformRequest implements Validatable {
|
|||
public Optional<ValidationException> validate() {
|
||||
if (id == null) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationError("data frame transform id must not be null");
|
||||
validationException.addValidationError("transform id must not be null");
|
||||
return Optional.of(validationException);
|
||||
} else {
|
||||
return Optional.empty();
|
||||
|
@ -98,7 +98,7 @@ public class StopDataFrameTransformRequest implements Validatable {
|
|||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj;
|
||||
StopTransformRequest other = (StopTransformRequest) obj;
|
||||
return Objects.equals(this.id, other.id)
|
||||
&& Objects.equals(this.waitForCompletion, other.waitForCompletion)
|
||||
&& Objects.equals(this.timeout, other.timeout)
|
|
@ -28,19 +28,19 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse {
|
||||
public class StopTransformResponse extends AcknowledgedTasksResponse {
|
||||
|
||||
private static final String ACKNOWLEDGED = "acknowledged";
|
||||
|
||||
private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = AcknowledgedTasksResponse
|
||||
.generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED);
|
||||
private static final ConstructingObjectParser<StopTransformResponse, Void> PARSER = AcknowledgedTasksResponse
|
||||
.generateParser("stop_transform_response", StopTransformResponse::new, ACKNOWLEDGED);
|
||||
|
||||
public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
public static StopTransformResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
|
||||
@Nullable List<? extends ElasticsearchException> nodeFailures) {
|
||||
public StopTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
|
||||
@Nullable List<? extends ElasticsearchException> nodeFailures) {
|
||||
super(acknowledged, taskFailures, nodeFailures);
|
||||
}
|
||||
|
|
@ -28,7 +28,7 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class DataFrameNamedXContentProvider implements NamedXContentProvider {
|
||||
public class TransformNamedXContentProvider implements NamedXContentProvider {
|
||||
|
||||
@Override
|
||||
public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
|
|
@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
|
|||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
|
@ -29,18 +29,18 @@ import java.io.IOException;
|
|||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class UpdateDataFrameTransformRequest implements ToXContentObject, Validatable {
|
||||
public class UpdateTransformRequest implements ToXContentObject, Validatable {
|
||||
|
||||
private final DataFrameTransformConfigUpdate update;
|
||||
private final TransformConfigUpdate update;
|
||||
private final String id;
|
||||
private Boolean deferValidation;
|
||||
|
||||
public UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate update, String id) {
|
||||
public UpdateTransformRequest(TransformConfigUpdate update, String id) {
|
||||
this.update = update;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfigUpdate getUpdate() {
|
||||
public TransformConfigUpdate getUpdate() {
|
||||
return update;
|
||||
}
|
||||
|
||||
|
@ -65,10 +65,10 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
|
|||
public Optional<ValidationException> validate() {
|
||||
ValidationException validationException = new ValidationException();
|
||||
if (update == null) {
|
||||
validationException.addValidationError("put requires a non-null data frame config update object");
|
||||
validationException.addValidationError("put requires a non-null transform config update object");
|
||||
}
|
||||
if (id == null) {
|
||||
validationException.addValidationError("data frame transform id cannot be null");
|
||||
validationException.addValidationError("transform id cannot be null");
|
||||
}
|
||||
if (validationException.validationErrors().isEmpty()) {
|
||||
return Optional.empty();
|
||||
|
@ -95,7 +95,7 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
|
|||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
UpdateDataFrameTransformRequest other = (UpdateDataFrameTransformRequest) obj;
|
||||
UpdateTransformRequest other = (UpdateTransformRequest) obj;
|
||||
return Objects.equals(update, other.update)
|
||||
&& Objects.equals(id, other.id)
|
||||
&& Objects.equals(deferValidation, other.deferValidation);
|
|
@ -19,24 +19,24 @@
|
|||
|
||||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class UpdateDataFrameTransformResponse {
|
||||
public class UpdateTransformResponse {
|
||||
|
||||
public static UpdateDataFrameTransformResponse fromXContent(final XContentParser parser) {
|
||||
return new UpdateDataFrameTransformResponse(DataFrameTransformConfig.PARSER.apply(parser, null));
|
||||
public static UpdateTransformResponse fromXContent(final XContentParser parser) {
|
||||
return new UpdateTransformResponse(TransformConfig.PARSER.apply(parser, null));
|
||||
}
|
||||
|
||||
private DataFrameTransformConfig transformConfiguration;
|
||||
private TransformConfig transformConfiguration;
|
||||
|
||||
public UpdateDataFrameTransformResponse(DataFrameTransformConfig transformConfiguration) {
|
||||
public UpdateTransformResponse(TransformConfig transformConfiguration) {
|
||||
this.transformConfiguration = transformConfiguration;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfig getTransformConfiguration() {
|
||||
public TransformConfig getTransformConfiguration() {
|
||||
return transformConfiguration;
|
||||
}
|
||||
|
||||
|
@ -55,7 +55,7 @@ public class UpdateDataFrameTransformResponse {
|
|||
return false;
|
||||
}
|
||||
|
||||
final UpdateDataFrameTransformResponse that = (UpdateDataFrameTransformResponse) other;
|
||||
final UpdateTransformResponse that = (UpdateTransformResponse) other;
|
||||
return Objects.equals(this.transformConfiguration, that.transformConfiguration);
|
||||
}
|
||||
}
|
|
@ -31,14 +31,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* Configuration containing the destination index for the {@link DataFrameTransformConfig}
|
||||
* Configuration containing the destination index for the {@link TransformConfig}
|
||||
*/
|
||||
public class DestConfig implements ToXContentObject {
|
||||
|
||||
public static final ParseField INDEX = new ParseField("index");
|
||||
public static final ParseField PIPELINE = new ParseField("pipeline");
|
||||
|
||||
public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_dest",
|
||||
public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_dest",
|
||||
true,
|
||||
args -> new DestConfig((String)args[0], (String)args[1]));
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ import java.io.IOException;
|
|||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Object for encapsulating the desired Query for a DataFrameTransform
|
||||
* Object for encapsulating the desired Query for a Transform
|
||||
*/
|
||||
public class QueryConfig implements ToXContentObject {
|
||||
|
||||
|
|
|
@ -35,14 +35,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
|
|||
|
||||
|
||||
/**
|
||||
* Class encapsulating all options for a {@link DataFrameTransformConfig} gathering data
|
||||
* Class encapsulating all options for a {@link TransformConfig} gathering data
|
||||
*/
|
||||
public class SourceConfig implements ToXContentObject {
|
||||
|
||||
public static final ParseField QUERY = new ParseField("query");
|
||||
public static final ParseField INDEX = new ParseField("index");
|
||||
|
||||
public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_source",
|
||||
public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_source",
|
||||
true,
|
||||
args -> {
|
||||
@SuppressWarnings("unchecked")
|
||||
|
|
|
@ -28,7 +28,7 @@ import java.util.Objects;
|
|||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class DataFrameTransformCheckpointStats {
|
||||
public class TransformCheckpointStats {
|
||||
|
||||
public static final ParseField CHECKPOINT = new ParseField("checkpoint");
|
||||
public static final ParseField POSITION = new ParseField("position");
|
||||
|
@ -36,40 +36,40 @@ public class DataFrameTransformCheckpointStats {
|
|||
public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis");
|
||||
public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
|
||||
|
||||
public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L);
|
||||
public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
|
||||
|
||||
private final long checkpoint;
|
||||
private final DataFrameIndexerPosition position;
|
||||
private final DataFrameTransformProgress checkpointProgress;
|
||||
private final TransformIndexerPosition position;
|
||||
private final TransformProgress checkpointProgress;
|
||||
private final long timestampMillis;
|
||||
private final long timeUpperBoundMillis;
|
||||
|
||||
public static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
|
||||
"data_frame_transform_checkpoint_stats", true, args -> {
|
||||
public static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
|
||||
"transform_checkpoint_stats", true, args -> {
|
||||
long checkpoint = args[0] == null ? 0L : (Long) args[0];
|
||||
DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1];
|
||||
DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2];
|
||||
TransformIndexerPosition position = (TransformIndexerPosition) args[1];
|
||||
TransformProgress checkpointProgress = (TransformProgress) args[2];
|
||||
long timestamp = args[3] == null ? 0L : (Long) args[3];
|
||||
long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
|
||||
|
||||
return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
|
||||
return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
|
||||
});
|
||||
|
||||
static {
|
||||
LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
|
||||
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION);
|
||||
LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS);
|
||||
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, POSITION);
|
||||
LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, CHECKPOINT_PROGRESS);
|
||||
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
|
||||
LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
|
||||
}
|
||||
|
||||
public static DataFrameTransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
|
||||
public static TransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
|
||||
return LENIENT_PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position,
|
||||
final DataFrameTransformProgress checkpointProgress, final long timestampMillis,
|
||||
final long timeUpperBoundMillis) {
|
||||
public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
|
||||
final TransformProgress checkpointProgress, final long timestampMillis,
|
||||
final long timeUpperBoundMillis) {
|
||||
this.checkpoint = checkpoint;
|
||||
this.position = position;
|
||||
this.checkpointProgress = checkpointProgress;
|
||||
|
@ -81,11 +81,11 @@ public class DataFrameTransformCheckpointStats {
|
|||
return checkpoint;
|
||||
}
|
||||
|
||||
public DataFrameIndexerPosition getPosition() {
|
||||
public TransformIndexerPosition getPosition() {
|
||||
return position;
|
||||
}
|
||||
|
||||
public DataFrameTransformProgress getCheckpointProgress() {
|
||||
public TransformProgress getCheckpointProgress() {
|
||||
return checkpointProgress;
|
||||
}
|
||||
|
||||
|
@ -112,7 +112,7 @@ public class DataFrameTransformCheckpointStats {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
|
||||
TransformCheckpointStats that = (TransformCheckpointStats) other;
|
||||
|
||||
return this.checkpoint == that.checkpoint
|
||||
&& Objects.equals(this.position, that.position)
|
|
@ -29,37 +29,37 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import java.time.Instant;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DataFrameTransformCheckpointingInfo {
|
||||
public class TransformCheckpointingInfo {
|
||||
|
||||
public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
|
||||
public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
|
||||
public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
|
||||
public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
|
||||
|
||||
private final DataFrameTransformCheckpointStats last;
|
||||
private final DataFrameTransformCheckpointStats next;
|
||||
private final TransformCheckpointStats last;
|
||||
private final TransformCheckpointStats next;
|
||||
private final long operationsBehind;
|
||||
private final Instant changesLastDetectedAt;
|
||||
|
||||
private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
|
||||
private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"data_frame_transform_checkpointing_info",
|
||||
"transform_checkpointing_info",
|
||||
true,
|
||||
a -> {
|
||||
long behind = a[2] == null ? 0L : (Long) a[2];
|
||||
Instant changesLastDetectedAt = (Instant)a[3];
|
||||
return new DataFrameTransformCheckpointingInfo(
|
||||
a[0] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[0],
|
||||
a[1] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[1],
|
||||
return new TransformCheckpointingInfo(
|
||||
a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
|
||||
a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
|
||||
behind,
|
||||
changesLastDetectedAt);
|
||||
});
|
||||
|
||||
static {
|
||||
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
|
||||
(p, c) -> TransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
|
||||
LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
(p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
|
||||
(p, c) -> TransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
|
||||
LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
|
||||
LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
|
||||
p -> TimeUtil.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
|
||||
|
@ -67,21 +67,21 @@ public class DataFrameTransformCheckpointingInfo {
|
|||
ObjectParser.ValueType.VALUE);
|
||||
}
|
||||
|
||||
public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
|
||||
DataFrameTransformCheckpointStats next,
|
||||
long operationsBehind,
|
||||
Instant changesLastDetectedAt) {
|
||||
public TransformCheckpointingInfo(TransformCheckpointStats last,
|
||||
TransformCheckpointStats next,
|
||||
long operationsBehind,
|
||||
Instant changesLastDetectedAt) {
|
||||
this.last = Objects.requireNonNull(last);
|
||||
this.next = Objects.requireNonNull(next);
|
||||
this.operationsBehind = operationsBehind;
|
||||
this.changesLastDetectedAt = changesLastDetectedAt;
|
||||
}
|
||||
|
||||
public DataFrameTransformCheckpointStats getLast() {
|
||||
public TransformCheckpointStats getLast() {
|
||||
return last;
|
||||
}
|
||||
|
||||
public DataFrameTransformCheckpointStats getNext() {
|
||||
public TransformCheckpointStats getNext() {
|
||||
return next;
|
||||
}
|
||||
|
||||
|
@ -94,7 +94,7 @@ public class DataFrameTransformCheckpointingInfo {
|
|||
return changesLastDetectedAt;
|
||||
}
|
||||
|
||||
public static DataFrameTransformCheckpointingInfo fromXContent(XContentParser p) {
|
||||
public static TransformCheckpointingInfo fromXContent(XContentParser p) {
|
||||
return LENIENT_PARSER.apply(p, null);
|
||||
}
|
||||
|
||||
|
@ -113,7 +113,7 @@ public class DataFrameTransformCheckpointingInfo {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
|
||||
TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
|
||||
|
||||
return Objects.equals(this.last, that.last) &&
|
||||
Objects.equals(this.next, that.next) &&
|
|
@ -40,7 +40,7 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class DataFrameTransformConfig implements ToXContentObject {
|
||||
public class TransformConfig implements ToXContentObject {
|
||||
|
||||
public static final ParseField ID = new ParseField("id");
|
||||
public static final ParseField SOURCE = new ParseField("source");
|
||||
|
@ -63,8 +63,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
|
|||
private final Version transformVersion;
|
||||
private final Instant createTime;
|
||||
|
||||
public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
|
||||
new ConstructingObjectParser<>("data_frame_transform", true,
|
||||
public static final ConstructingObjectParser<TransformConfig, Void> PARSER =
|
||||
new ConstructingObjectParser<>("transform", true,
|
||||
(args) -> {
|
||||
String id = (String) args[0];
|
||||
SourceConfig source = (SourceConfig) args[1];
|
||||
|
@ -75,7 +75,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
|
|||
String description = (String)args[6];
|
||||
Instant createTime = (Instant)args[7];
|
||||
String transformVersion = (String)args[8];
|
||||
return new DataFrameTransformConfig(id,
|
||||
return new TransformConfig(id,
|
||||
source,
|
||||
dest,
|
||||
frequency,
|
||||
|
@ -109,34 +109,34 @@ public class DataFrameTransformConfig implements ToXContentObject {
|
|||
}
|
||||
|
||||
|
||||
public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
|
||||
public static TransformConfig fromXContent(final XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method for previewing a data frame transform configuration
|
||||
* Helper method for previewing a transform configuration
|
||||
*
|
||||
* The DataFrameTransformConfig returned from this method should only be used for previewing the resulting data.
|
||||
* The TransformConfig returned from this method should only be used for previewing the resulting data.
|
||||
*
|
||||
* A new, valid, DataFrameTransformConfig with an appropriate destination and ID will have to be constructed to create
|
||||
* A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
|
||||
* the transform.
|
||||
* @param source Source configuration for gathering the data
|
||||
* @param pivotConfig Pivot config to preview
|
||||
* @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
|
||||
* @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
|
||||
*/
|
||||
public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
|
||||
return new DataFrameTransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
|
||||
public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
|
||||
return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
|
||||
}
|
||||
|
||||
DataFrameTransformConfig(final String id,
|
||||
final SourceConfig source,
|
||||
final DestConfig dest,
|
||||
final TimeValue frequency,
|
||||
final SyncConfig syncConfig,
|
||||
final PivotConfig pivotConfig,
|
||||
final String description,
|
||||
final Instant createTime,
|
||||
final String version) {
|
||||
TransformConfig(final String id,
|
||||
final SourceConfig source,
|
||||
final DestConfig dest,
|
||||
final TimeValue frequency,
|
||||
final SyncConfig syncConfig,
|
||||
final PivotConfig pivotConfig,
|
||||
final String description,
|
||||
final Instant createTime,
|
||||
final String version) {
|
||||
this.id = id;
|
||||
this.source = source;
|
||||
this.dest = dest;
|
||||
|
@ -231,7 +231,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
|
|||
return false;
|
||||
}
|
||||
|
||||
final DataFrameTransformConfig that = (DataFrameTransformConfig) other;
|
||||
final TransformConfig that = (TransformConfig) other;
|
||||
|
||||
return Objects.equals(this.id, that.id)
|
||||
&& Objects.equals(this.source, that.source)
|
||||
|
@ -303,8 +303,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
|
|||
return this;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfig build() {
|
||||
return new DataFrameTransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
|
||||
public TransformConfig build() {
|
||||
return new TransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -34,30 +34,30 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* This class holds the mutable configuration items for a data frame transform
|
||||
* This class holds the mutable configuration items for a transform
|
||||
*/
|
||||
public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
||||
public class TransformConfigUpdate implements ToXContentObject {
|
||||
|
||||
public static final String NAME = "data_frame_transform_config_update";
|
||||
private static final ConstructingObjectParser<DataFrameTransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
|
||||
public static final String NAME = "transform_config_update";
|
||||
private static final ConstructingObjectParser<TransformConfigUpdate, String> PARSER = new ConstructingObjectParser<>(NAME,
|
||||
false,
|
||||
(args) -> {
|
||||
SourceConfig source = (SourceConfig) args[0];
|
||||
DestConfig dest = (DestConfig) args[1];
|
||||
TimeValue frequency = args[2] == null ?
|
||||
null :
|
||||
TimeValue.parseTimeValue((String) args[2], DataFrameTransformConfig.FREQUENCY.getPreferredName());
|
||||
TimeValue.parseTimeValue((String) args[2], TransformConfig.FREQUENCY.getPreferredName());
|
||||
SyncConfig syncConfig = (SyncConfig) args[3];
|
||||
String description = (String) args[4];
|
||||
return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
|
||||
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
|
||||
});
|
||||
|
||||
static {
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), DataFrameTransformConfig.SOURCE);
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DataFrameTransformConfig.DEST);
|
||||
PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.FREQUENCY);
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), DataFrameTransformConfig.SYNC);
|
||||
PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.DESCRIPTION);
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), TransformConfig.SOURCE);
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), TransformConfig.DEST);
|
||||
PARSER.declareString(optionalConstructorArg(), TransformConfig.FREQUENCY);
|
||||
PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformConfig.SYNC);
|
||||
PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION);
|
||||
}
|
||||
|
||||
private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
|
||||
|
@ -74,11 +74,11 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
|||
private final SyncConfig syncConfig;
|
||||
private final String description;
|
||||
|
||||
public DataFrameTransformConfigUpdate(final SourceConfig source,
|
||||
final DestConfig dest,
|
||||
final TimeValue frequency,
|
||||
final SyncConfig syncConfig,
|
||||
final String description){
|
||||
public TransformConfigUpdate(final SourceConfig source,
|
||||
final DestConfig dest,
|
||||
final TimeValue frequency,
|
||||
final SyncConfig syncConfig,
|
||||
final String description) {
|
||||
this.source = source;
|
||||
this.dest = dest;
|
||||
this.frequency = frequency;
|
||||
|
@ -111,21 +111,21 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
|||
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
|
||||
builder.startObject();
|
||||
if (source != null) {
|
||||
builder.field(DataFrameTransformConfig.SOURCE.getPreferredName(), source);
|
||||
builder.field(TransformConfig.SOURCE.getPreferredName(), source);
|
||||
}
|
||||
if (dest != null) {
|
||||
builder.field(DataFrameTransformConfig.DEST.getPreferredName(), dest);
|
||||
builder.field(TransformConfig.DEST.getPreferredName(), dest);
|
||||
}
|
||||
if (frequency != null) {
|
||||
builder.field(DataFrameTransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
|
||||
builder.field(TransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
|
||||
}
|
||||
if (syncConfig != null) {
|
||||
builder.startObject(DataFrameTransformConfig.SYNC.getPreferredName());
|
||||
builder.startObject(TransformConfig.SYNC.getPreferredName());
|
||||
builder.field(syncConfig.getName(), syncConfig);
|
||||
builder.endObject();
|
||||
}
|
||||
if (description != null) {
|
||||
builder.field(DataFrameTransformConfig.DESCRIPTION.getPreferredName(), description);
|
||||
builder.field(TransformConfig.DESCRIPTION.getPreferredName(), description);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
@ -141,7 +141,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
|||
return false;
|
||||
}
|
||||
|
||||
final DataFrameTransformConfigUpdate that = (DataFrameTransformConfigUpdate) other;
|
||||
final TransformConfigUpdate that = (TransformConfigUpdate) other;
|
||||
|
||||
return Objects.equals(this.source, that.source)
|
||||
&& Objects.equals(this.dest, that.dest)
|
||||
|
@ -164,7 +164,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
|||
return new Builder();
|
||||
}
|
||||
|
||||
public static DataFrameTransformConfigUpdate fromXContent(final XContentParser parser) {
|
||||
public static TransformConfigUpdate fromXContent(final XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
|
@ -201,8 +201,8 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject {
|
|||
return this;
|
||||
}
|
||||
|
||||
public DataFrameTransformConfigUpdate build() {
|
||||
return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description);
|
||||
public TransformConfigUpdate build() {
|
||||
return new TransformConfigUpdate(source, dest, frequency, syncConfig, description);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
|
|||
* indexer_position: the position of the indexer querying the source
|
||||
* bucket_position: the position used for identifying changes
|
||||
*/
|
||||
public class DataFrameIndexerPosition {
|
||||
public class TransformIndexerPosition {
|
||||
public static final ParseField INDEXER_POSITION = new ParseField("indexer_position");
|
||||
public static final ParseField BUCKET_POSITION = new ParseField("bucket_position");
|
||||
|
||||
|
@ -45,17 +45,17 @@ public class DataFrameIndexerPosition {
|
|||
private final Map<String, Object> bucketPosition;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<DataFrameIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"data_frame_indexer_position",
|
||||
public static final ConstructingObjectParser<TransformIndexerPosition, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"transform_indexer_position",
|
||||
true,
|
||||
args -> new DataFrameIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
|
||||
args -> new TransformIndexerPosition((Map<String, Object>) args[0],(Map<String, Object>) args[1]));
|
||||
|
||||
static {
|
||||
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT);
|
||||
PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT);
|
||||
}
|
||||
|
||||
public DataFrameIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
|
||||
public TransformIndexerPosition(Map<String, Object> indexerPosition, Map<String, Object> bucketPosition) {
|
||||
this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition);
|
||||
this.bucketPosition = bucketPosition == null ? null : Collections.unmodifiableMap(bucketPosition);
|
||||
}
|
||||
|
@ -78,7 +78,7 @@ public class DataFrameIndexerPosition {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameIndexerPosition that = (DataFrameIndexerPosition) other;
|
||||
TransformIndexerPosition that = (TransformIndexerPosition) other;
|
||||
|
||||
return Objects.equals(this.indexerPosition, that.indexerPosition) &&
|
||||
Objects.equals(this.bucketPosition, that.bucketPosition);
|
||||
|
@ -89,7 +89,7 @@ public class DataFrameIndexerPosition {
|
|||
return Objects.hash(indexerPosition, bucketPosition);
|
||||
}
|
||||
|
||||
public static DataFrameIndexerPosition fromXContent(XContentParser parser) {
|
||||
public static TransformIndexerPosition fromXContent(XContentParser parser) {
|
||||
try {
|
||||
return PARSER.parse(parser, null);
|
||||
} catch (IOException e) {
|
|
@ -30,16 +30,16 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class DataFrameIndexerTransformStats extends IndexerJobStats {
|
||||
public class TransformIndexerStats extends IndexerJobStats {
|
||||
|
||||
static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms");
|
||||
static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed");
|
||||
static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed");
|
||||
|
||||
public static final ConstructingObjectParser<DataFrameIndexerTransformStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
|
||||
public static final ConstructingObjectParser<TransformIndexerStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
|
||||
NAME,
|
||||
true,
|
||||
args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2],
|
||||
args -> new TransformIndexerStats((long) args[0], (long) args[1], (long) args[2],
|
||||
(long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9],
|
||||
(Double) args[10], (Double) args[11], (Double) args[12]));
|
||||
|
||||
|
@ -59,7 +59,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
|
|||
LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED);
|
||||
}
|
||||
|
||||
public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) throws IOException {
|
||||
public static TransformIndexerStats fromXContent(XContentParser parser) throws IOException {
|
||||
return LENIENT_PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
|
@ -67,11 +67,11 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
|
|||
private final double expAvgDocumentsIndexed;
|
||||
private final double expAvgDocumentsProcessed;
|
||||
|
||||
public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments,
|
||||
long numInvocations, long indexTime, long searchTime,
|
||||
long indexTotal, long searchTotal, long indexFailures, long searchFailures,
|
||||
Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
|
||||
Double expAvgDocumentsProcessed) {
|
||||
public TransformIndexerStats(long numPages, long numInputDocuments, long numOuputDocuments,
|
||||
long numInvocations, long indexTime, long searchTime,
|
||||
long indexTotal, long searchTotal, long indexFailures, long searchFailures,
|
||||
Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed,
|
||||
Double expAvgDocumentsProcessed) {
|
||||
super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime,
|
||||
indexTotal, searchTotal, indexFailures, searchFailures);
|
||||
this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs;
|
||||
|
@ -101,7 +101,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other;
|
||||
TransformIndexerStats that = (TransformIndexerStats) other;
|
||||
|
||||
return Objects.equals(this.numPages, that.numPages)
|
||||
&& Objects.equals(this.numInputDocuments, that.numInputDocuments)
|
|
@ -28,7 +28,7 @@ import java.util.Objects;
|
|||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class DataFrameTransformProgress {
|
||||
public class TransformProgress {
|
||||
|
||||
public static final ParseField TOTAL_DOCS = new ParseField("total_docs");
|
||||
public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining");
|
||||
|
@ -36,10 +36,10 @@ public class DataFrameTransformProgress {
|
|||
public static final ParseField DOCS_PROCESSED = new ParseField("docs_processed");
|
||||
public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed");
|
||||
|
||||
public static final ConstructingObjectParser<DataFrameTransformProgress, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"data_frame_transform_progress",
|
||||
public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"transform_progress",
|
||||
true,
|
||||
a -> new DataFrameTransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
|
||||
a -> new TransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4]));
|
||||
|
||||
static {
|
||||
PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS);
|
||||
|
@ -49,7 +49,7 @@ public class DataFrameTransformProgress {
|
|||
PARSER.declareLong(optionalConstructorArg(), DOCS_INDEXED);
|
||||
}
|
||||
|
||||
public static DataFrameTransformProgress fromXContent(XContentParser parser) {
|
||||
public static TransformProgress fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
|
@ -59,11 +59,11 @@ public class DataFrameTransformProgress {
|
|||
private final long documentsProcessed;
|
||||
private final long documentsIndexed;
|
||||
|
||||
public DataFrameTransformProgress(Long totalDocs,
|
||||
Long remainingDocs,
|
||||
Double percentComplete,
|
||||
Long documentsProcessed,
|
||||
Long documentsIndexed) {
|
||||
public TransformProgress(Long totalDocs,
|
||||
Long remainingDocs,
|
||||
Double percentComplete,
|
||||
Long documentsProcessed,
|
||||
Long documentsIndexed) {
|
||||
this.totalDocs = totalDocs;
|
||||
this.remainingDocs = remainingDocs == null ? totalDocs : remainingDocs;
|
||||
this.percentComplete = percentComplete;
|
||||
|
@ -104,7 +104,7 @@ public class DataFrameTransformProgress {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameTransformProgress that = (DataFrameTransformProgress) other;
|
||||
TransformProgress that = (TransformProgress) other;
|
||||
return Objects.equals(this.remainingDocs, that.remainingDocs)
|
||||
&& Objects.equals(this.totalDocs, that.totalDocs)
|
||||
&& Objects.equals(this.percentComplete, that.percentComplete)
|
|
@ -31,7 +31,7 @@ import java.util.Objects;
|
|||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class DataFrameTransformStats {
|
||||
public class TransformStats {
|
||||
|
||||
public static final ParseField ID = new ParseField("id");
|
||||
public static final ParseField STATE_FIELD = new ParseField("state");
|
||||
|
@ -40,10 +40,10 @@ public class DataFrameTransformStats {
|
|||
public static final ParseField STATS_FIELD = new ParseField("stats");
|
||||
public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing");
|
||||
|
||||
public static final ConstructingObjectParser<DataFrameTransformStats, Void> PARSER = new ConstructingObjectParser<>(
|
||||
public static final ConstructingObjectParser<TransformStats, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"data_frame_transform_state_and_stats_info", true,
|
||||
a -> new DataFrameTransformStats((String) a[0], (State) a[1], (String) a[2],
|
||||
(NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5]));
|
||||
a -> new TransformStats((String) a[0], (State) a[1], (String) a[2],
|
||||
(NodeAttributes) a[3], (TransformIndexerStats) a[4], (TransformCheckpointingInfo) a[5]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(constructorArg(), ID);
|
||||
|
@ -51,12 +51,12 @@ public class DataFrameTransformStats {
|
|||
ObjectParser.ValueType.STRING);
|
||||
PARSER.declareString(optionalConstructorArg(), REASON_FIELD);
|
||||
PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
PARSER.declareObject(constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p), STATS_FIELD);
|
||||
PARSER.declareObject(constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p), STATS_FIELD);
|
||||
PARSER.declareObject(optionalConstructorArg(),
|
||||
(p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
|
||||
(p, c) -> TransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD);
|
||||
}
|
||||
|
||||
public static DataFrameTransformStats fromXContent(XContentParser parser) throws IOException {
|
||||
public static TransformStats fromXContent(XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
|
@ -64,11 +64,11 @@ public class DataFrameTransformStats {
|
|||
private final String reason;
|
||||
private final State state;
|
||||
private final NodeAttributes node;
|
||||
private final DataFrameIndexerTransformStats indexerStats;
|
||||
private final DataFrameTransformCheckpointingInfo checkpointingInfo;
|
||||
private final TransformIndexerStats indexerStats;
|
||||
private final TransformCheckpointingInfo checkpointingInfo;
|
||||
|
||||
public DataFrameTransformStats(String id, State state, String reason, NodeAttributes node, DataFrameIndexerTransformStats stats,
|
||||
DataFrameTransformCheckpointingInfo checkpointingInfo) {
|
||||
public TransformStats(String id, State state, String reason, NodeAttributes node, TransformIndexerStats stats,
|
||||
TransformCheckpointingInfo checkpointingInfo) {
|
||||
this.id = id;
|
||||
this.state = state;
|
||||
this.reason = reason;
|
||||
|
@ -93,11 +93,11 @@ public class DataFrameTransformStats {
|
|||
return node;
|
||||
}
|
||||
|
||||
public DataFrameIndexerTransformStats getIndexerStats() {
|
||||
public TransformIndexerStats getIndexerStats() {
|
||||
return indexerStats;
|
||||
}
|
||||
|
||||
public DataFrameTransformCheckpointingInfo getCheckpointingInfo() {
|
||||
public TransformCheckpointingInfo getCheckpointingInfo() {
|
||||
return checkpointingInfo;
|
||||
}
|
||||
|
||||
|
@ -116,7 +116,7 @@ public class DataFrameTransformStats {
|
|||
return false;
|
||||
}
|
||||
|
||||
DataFrameTransformStats that = (DataFrameTransformStats) other;
|
||||
TransformStats that = (TransformStats) other;
|
||||
|
||||
return Objects.equals(this.id, that.id)
|
||||
&& Objects.equals(this.state, that.state)
|
|
@ -72,7 +72,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
|
|||
* fixed_interval fixed intervals like 1h, 1m, 1d
|
||||
* calendar_interval calendar aware intervals like 1M, 1Y, ...
|
||||
*
|
||||
* Note: data frames do not support the deprecated interval option
|
||||
* Note: transform does not support the deprecated interval option
|
||||
*/
|
||||
public interface Interval extends ToXContentFragment {
|
||||
String getName();
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
org.elasticsearch.client.indexlifecycle.IndexLifecycleNamedXContentProvider
|
||||
org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
|
||||
org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider
|
||||
org.elasticsearch.client.transform.DataFrameNamedXContentProvider
|
||||
org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider
|
||||
org.elasticsearch.client.transform.TransformNamedXContentProvider
|
||||
|
|
|
@ -24,19 +24,19 @@ import org.apache.http.client.methods.HttpGet;
|
|||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.core.PageParams;
|
||||
import org.elasticsearch.client.transform.DataFrameNamedXContentProvider;
|
||||
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests;
|
||||
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
|
||||
import org.elasticsearch.client.transform.DeleteTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.PreviewTransformRequest;
|
||||
import org.elasticsearch.client.transform.PutTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateTransformRequest;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -49,7 +49,7 @@ import java.io.IOException;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH;
|
||||
import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH;
|
||||
import static org.hamcrest.Matchers.allOf;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasEntry;
|
||||
|
@ -62,50 +62,50 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
||||
|
||||
public void testPutDataFrameTransform() throws IOException {
|
||||
PutDataFrameTransformRequest putRequest = new PutDataFrameTransformRequest(
|
||||
DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
Request request = DataFrameRequestConverters.putDataFrameTransform(putRequest);
|
||||
PutTransformRequest putRequest = new PutTransformRequest(
|
||||
TransformConfigTests.randomTransformConfig());
|
||||
Request request = TransformRequestConverters.putTransform(putRequest);
|
||||
assertThat(request.getParameters(), not(hasKey("defer_validation")));
|
||||
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + putRequest.getConfig().getId()));
|
||||
|
||||
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
|
||||
DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null);
|
||||
TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
|
||||
assertThat(parsedConfig, equalTo(putRequest.getConfig()));
|
||||
}
|
||||
putRequest.setDeferValidation(true);
|
||||
request = DataFrameRequestConverters.putDataFrameTransform(putRequest);
|
||||
request = TransformRequestConverters.putTransform(putRequest);
|
||||
assertThat(request.getParameters(), hasEntry("defer_validation", Boolean.toString(putRequest.getDeferValidation())));
|
||||
}
|
||||
|
||||
public void testUpdateDataFrameTransform() throws IOException {
|
||||
String transformId = randomAlphaOfLength(10);
|
||||
UpdateDataFrameTransformRequest updateDataFrameTransformRequest = new UpdateDataFrameTransformRequest(
|
||||
DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate(),
|
||||
UpdateTransformRequest updateDataFrameTransformRequest = new UpdateTransformRequest(
|
||||
TransformConfigUpdateTests.randomTransformConfigUpdate(),
|
||||
transformId);
|
||||
Request request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest);
|
||||
Request request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
|
||||
assertThat(request.getParameters(), not(hasKey("defer_validation")));
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + transformId + "/_update"));
|
||||
|
||||
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
|
||||
DataFrameTransformConfigUpdate parsedConfig = DataFrameTransformConfigUpdate.fromXContent(parser);
|
||||
TransformConfigUpdate parsedConfig = TransformConfigUpdate.fromXContent(parser);
|
||||
assertThat(parsedConfig, equalTo(updateDataFrameTransformRequest.getUpdate()));
|
||||
}
|
||||
updateDataFrameTransformRequest.setDeferValidation(true);
|
||||
request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest);
|
||||
request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest);
|
||||
assertThat(request.getParameters(),
|
||||
hasEntry("defer_validation", Boolean.toString(updateDataFrameTransformRequest.getDeferValidation())));
|
||||
}
|
||||
|
||||
public void testDeleteDataFrameTransform() {
|
||||
DeleteDataFrameTransformRequest deleteRequest = new DeleteDataFrameTransformRequest("foo");
|
||||
Request request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest);
|
||||
DeleteTransformRequest deleteRequest = new DeleteTransformRequest("foo");
|
||||
Request request = TransformRequestConverters.deleteTransform(deleteRequest);
|
||||
|
||||
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo"));
|
||||
|
@ -113,7 +113,7 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
assertThat(request.getParameters(), not(hasKey("force")));
|
||||
|
||||
deleteRequest.setForce(true);
|
||||
request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest);
|
||||
request = TransformRequestConverters.deleteTransform(deleteRequest);
|
||||
|
||||
assertThat(request.getParameters(), hasEntry("force", "true"));
|
||||
}
|
||||
|
@ -124,9 +124,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
if (randomBoolean()) {
|
||||
timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
|
||||
}
|
||||
StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id, timeValue);
|
||||
StartTransformRequest startRequest = new StartTransformRequest(id, timeValue);
|
||||
|
||||
Request request = DataFrameRequestConverters.startDataFrameTransform(startRequest);
|
||||
Request request = TransformRequestConverters.startTransform(startRequest);
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + startRequest.getId() + "/_start"));
|
||||
|
||||
|
@ -148,9 +148,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
if (randomBoolean()) {
|
||||
timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout");
|
||||
}
|
||||
StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, waitForCompletion, timeValue);
|
||||
StopTransformRequest stopRequest = new StopTransformRequest(id, waitForCompletion, timeValue);
|
||||
|
||||
Request request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
|
||||
Request request = TransformRequestConverters.stopTransform(stopRequest);
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + stopRequest.getId() + "/_stop"));
|
||||
|
||||
|
@ -170,27 +170,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
|
||||
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
|
||||
stopRequest.setAllowNoMatch(randomBoolean());
|
||||
request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest);
|
||||
request = TransformRequestConverters.stopTransform(stopRequest);
|
||||
assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH)));
|
||||
}
|
||||
|
||||
public void testPreviewDataFrameTransform() throws IOException {
|
||||
PreviewDataFrameTransformRequest previewRequest = new PreviewDataFrameTransformRequest(
|
||||
DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
Request request = DataFrameRequestConverters.previewDataFrameTransform(previewRequest);
|
||||
PreviewTransformRequest previewRequest = new PreviewTransformRequest(
|
||||
TransformConfigTests.randomTransformConfig());
|
||||
Request request = TransformRequestConverters.previewTransform(previewRequest);
|
||||
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/_preview"));
|
||||
|
||||
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
|
||||
DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null);
|
||||
TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null);
|
||||
assertThat(parsedConfig, equalTo(previewRequest.getConfig()));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetDataFrameTransformStats() {
|
||||
GetDataFrameTransformStatsRequest getStatsRequest = new GetDataFrameTransformStatsRequest("foo");
|
||||
Request request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
|
||||
GetTransformStatsRequest getStatsRequest = new GetTransformStatsRequest("foo");
|
||||
Request request = TransformRequestConverters.getTransformStats(getStatsRequest);
|
||||
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats"));
|
||||
|
@ -200,27 +200,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
|
||||
|
||||
getStatsRequest.setPageParams(new PageParams(0, null));
|
||||
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
|
||||
request = TransformRequestConverters.getTransformStats(getStatsRequest);
|
||||
assertThat(request.getParameters(), hasEntry("from", "0"));
|
||||
assertEquals(null, request.getParameters().get("size"));
|
||||
|
||||
getStatsRequest.setPageParams(new PageParams(null, 50));
|
||||
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
|
||||
request = TransformRequestConverters.getTransformStats(getStatsRequest);
|
||||
assertEquals(null, request.getParameters().get("from"));
|
||||
assertThat(request.getParameters(), hasEntry("size", "50"));
|
||||
|
||||
getStatsRequest.setPageParams(new PageParams(0, 10));
|
||||
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
|
||||
request = TransformRequestConverters.getTransformStats(getStatsRequest);
|
||||
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
|
||||
|
||||
getStatsRequest.setAllowNoMatch(false);
|
||||
request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest);
|
||||
request = TransformRequestConverters.getTransformStats(getStatsRequest);
|
||||
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
|
||||
}
|
||||
|
||||
public void testGetDataFrameTransform() {
|
||||
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("bar");
|
||||
Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
GetTransformRequest getRequest = new GetTransformRequest("bar");
|
||||
Request request = TransformRequestConverters.getTransform(getRequest);
|
||||
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/bar"));
|
||||
|
@ -230,27 +230,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase {
|
|||
assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH));
|
||||
|
||||
getRequest.setPageParams(new PageParams(0, null));
|
||||
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
request = TransformRequestConverters.getTransform(getRequest);
|
||||
assertThat(request.getParameters(), hasEntry("from", "0"));
|
||||
assertEquals(null, request.getParameters().get("size"));
|
||||
|
||||
getRequest.setPageParams(new PageParams(null, 50));
|
||||
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
request = TransformRequestConverters.getTransform(getRequest);
|
||||
assertEquals(null, request.getParameters().get("from"));
|
||||
assertThat(request.getParameters(), hasEntry("size", "50"));
|
||||
|
||||
getRequest.setPageParams(new PageParams(0, 10));
|
||||
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
request = TransformRequestConverters.getTransform(getRequest);
|
||||
assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10")));
|
||||
|
||||
getRequest.setAllowNoMatch(false);
|
||||
request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
request = TransformRequestConverters.getTransform(getRequest);
|
||||
assertThat(request.getParameters(), hasEntry("allow_no_match", "false"));
|
||||
}
|
||||
|
||||
public void testGetDataFrameTransform_givenMulitpleIds() {
|
||||
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("foo", "bar", "baz");
|
||||
Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest);
|
||||
GetTransformRequest getRequest = new GetTransformRequest("foo", "bar", "baz");
|
||||
Request request = TransformRequestConverters.getTransform(getRequest);
|
||||
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo,bar,baz"));
|
||||
|
|
|
@ -21,12 +21,10 @@ package org.elasticsearch.client;
|
|||
|
||||
import org.elasticsearch.client.migration.DeprecationInfoRequest;
|
||||
import org.elasticsearch.client.migration.DeprecationInfoResponse;
|
||||
import org.elasticsearch.client.tasks.TaskSubmissionResponse;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.function.BooleanSupplier;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
|
@ -42,20 +40,4 @@ public class MigrationIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(response.getNodeSettingsIssues().size(), equalTo(0));
|
||||
assertThat(response.getMlSettingsIssues().size(), equalTo(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* Using low-level api as high-level-rest-client's getTaskById work is in progress.
|
||||
* TODO revisit once that work is finished
|
||||
*/
|
||||
private BooleanSupplier checkCompletionStatus(TaskSubmissionResponse upgrade) {
|
||||
return () -> {
|
||||
try {
|
||||
Response response = client().performRequest(new Request("GET", "/_tasks/" + upgrade.getTask()));
|
||||
return (boolean) entityAsMap(response).get("completed");
|
||||
} catch (IOException e) {
|
||||
fail(e.getMessage());
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -436,6 +436,47 @@ public class ReindexIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testDeleteByQueryTask() throws Exception {
|
||||
final String sourceIndex = "source456";
|
||||
{
|
||||
// Prepare
|
||||
Settings settings = Settings.builder()
|
||||
.put("number_of_shards", 1)
|
||||
.put("number_of_replicas", 0)
|
||||
.build();
|
||||
createIndex(sourceIndex, settings);
|
||||
assertEquals(
|
||||
RestStatus.OK,
|
||||
highLevelClient().bulk(
|
||||
new BulkRequest()
|
||||
.add(new IndexRequest(sourceIndex).id("1")
|
||||
.source(Collections.singletonMap("foo", 1), XContentType.JSON))
|
||||
.add(new IndexRequest(sourceIndex).id("2")
|
||||
.source(Collections.singletonMap("foo", 2), XContentType.JSON))
|
||||
.add(new IndexRequest(sourceIndex).id("3")
|
||||
.source(Collections.singletonMap("foo", 3), XContentType.JSON))
|
||||
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
|
||||
RequestOptions.DEFAULT
|
||||
).status()
|
||||
);
|
||||
}
|
||||
{
|
||||
// tag::submit-delete_by_query-task
|
||||
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
|
||||
deleteByQueryRequest.indices(sourceIndex);
|
||||
deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
|
||||
deleteByQueryRequest.setRefresh(true);
|
||||
|
||||
TaskSubmissionResponse deleteByQuerySubmission = highLevelClient()
|
||||
.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT);
|
||||
|
||||
String taskId = deleteByQuerySubmission.getTask();
|
||||
// end::submit-delete_by_query-task
|
||||
|
||||
assertBusy(checkCompletionStatus(client(), taskId));
|
||||
}
|
||||
}
|
||||
|
||||
private static TaskId findTaskToRethrottle(String actionName) throws IOException {
|
||||
long start = System.nanoTime();
|
||||
ListTasksRequest request = new ListTasksRequest();
|
||||
|
|
|
@ -582,6 +582,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
setRandomIndicesOptions(deleteByQueryRequest::setIndicesOptions, deleteByQueryRequest::indicesOptions, expectedParams);
|
||||
setRandomTimeout(deleteByQueryRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams);
|
||||
expectedParams.put("wait_for_completion", Boolean.TRUE.toString());
|
||||
Request request = RequestConverters.deleteByQuery(deleteByQueryRequest);
|
||||
StringJoiner joiner = new StringJoiner("/", "/", "");
|
||||
joiner.add(String.join(",", deleteByQueryRequest.indices()));
|
||||
|
|
|
@ -65,6 +65,10 @@ import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.Binar
|
|||
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
|
||||
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
|
||||
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
|
||||
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
|
||||
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
|
||||
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
|
||||
import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;
|
||||
import org.elasticsearch.client.transform.transforms.SyncConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
|
||||
import org.elasticsearch.common.CheckedFunction;
|
||||
|
@ -95,6 +99,7 @@ import org.elasticsearch.test.ESTestCase;
|
|||
import org.elasticsearch.test.InternalAggregationTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
|
||||
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -676,7 +681,7 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
|
||||
public void testProvidedNamedXContents() {
|
||||
List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getProvidedNamedXContents();
|
||||
assertEquals(37, namedXContents.size());
|
||||
assertEquals(41, namedXContents.size());
|
||||
Map<Class<?>, Integer> categories = new HashMap<>();
|
||||
List<String> names = new ArrayList<>();
|
||||
for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
|
||||
|
@ -686,7 +691,7 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
categories.put(namedXContent.categoryClass, counter + 1);
|
||||
}
|
||||
}
|
||||
assertEquals("Had: " + categories, 9, categories.size());
|
||||
assertEquals("Had: " + categories, 11, categories.size());
|
||||
assertEquals(Integer.valueOf(3), categories.get(Aggregation.class));
|
||||
assertTrue(names.contains(ChildrenAggregationBuilder.NAME));
|
||||
assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME));
|
||||
|
@ -733,6 +738,10 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
ConfusionMatrixMetric.NAME,
|
||||
MeanSquaredErrorMetric.NAME,
|
||||
RSquaredMetric.NAME));
|
||||
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class));
|
||||
assertThat(names, hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME));
|
||||
assertEquals(Integer.valueOf(1), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class));
|
||||
assertThat(names, hasItems(Tree.NAME));
|
||||
}
|
||||
|
||||
public void testApiNamingConventions() throws Exception {
|
||||
|
@ -838,7 +847,7 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
apiName.startsWith("index_lifecycle.") == false &&
|
||||
apiName.startsWith("ccr.") == false &&
|
||||
apiName.startsWith("enrich.") == false &&
|
||||
apiName.startsWith("data_frame") == false &&
|
||||
apiName.startsWith("transform.") == false &&
|
||||
apiName.endsWith("freeze") == false &&
|
||||
apiName.endsWith("reload_analyzers") == false &&
|
||||
// IndicesClientIT.getIndexTemplate should be renamed "getTemplate" in version 8.0 when we
|
||||
|
|
|
@ -190,7 +190,7 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
|
|||
.name(roleName)
|
||||
.clusterPrivileges(randomSubsetOf(randomInt(3), Role.ClusterPrivilegeName.ALL_ARRAY))
|
||||
.indicesPrivileges(
|
||||
randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom(randomAlphaOfLength(3))))
|
||||
randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom("{\"match_all\": {}}")))
|
||||
.applicationResourcePrivileges(randomArray(3, ApplicationResourcePrivileges[]::new,
|
||||
() -> ApplicationResourcePrivilegesTests.createNewRandom(randomAlphaOfLength(3).toLowerCase(Locale.ROOT))))
|
||||
.runAsPrivilege(randomArray(3, String[]::new, () -> randomAlphaOfLength(3)));
|
||||
|
|
|
@ -28,24 +28,24 @@ import org.elasticsearch.client.core.AcknowledgedResponse;
|
|||
import org.elasticsearch.client.core.PageParams;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
import org.elasticsearch.client.indices.CreateIndexResponse;
|
||||
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
|
||||
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
|
||||
import org.elasticsearch.client.transform.DeleteTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformResponse;
|
||||
import org.elasticsearch.client.transform.GetTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformStatsResponse;
|
||||
import org.elasticsearch.client.transform.PreviewTransformRequest;
|
||||
import org.elasticsearch.client.transform.PreviewTransformResponse;
|
||||
import org.elasticsearch.client.transform.PutTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartTransformResponse;
|
||||
import org.elasticsearch.client.transform.StopTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopTransformResponse;
|
||||
import org.elasticsearch.client.transform.UpdateTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateTransformResponse;
|
||||
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.TransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.DestConfig;
|
||||
import org.elasticsearch.client.transform.transforms.SourceConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
|
||||
|
@ -79,7 +79,7 @@ import static org.hamcrest.Matchers.hasSize;
|
|||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.oneOf;
|
||||
|
||||
public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
||||
public class TransformIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
private List<String> transformsToClean = new ArrayList<>();
|
||||
|
||||
|
@ -147,13 +147,13 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
@After
|
||||
public void cleanUpTransforms() throws Exception {
|
||||
for (String transformId : transformsToClean) {
|
||||
highLevelClient().dataFrame().stopDataFrameTransform(
|
||||
new StopDataFrameTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT);
|
||||
highLevelClient().transform().stopTransform(
|
||||
new StopTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT);
|
||||
}
|
||||
|
||||
for (String transformId : transformsToClean) {
|
||||
highLevelClient().dataFrame().deleteDataFrameTransform(
|
||||
new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT);
|
||||
highLevelClient().transform().deleteTransform(
|
||||
new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
|
||||
}
|
||||
|
||||
transformsToClean = new ArrayList<>();
|
||||
|
@ -165,21 +165,21 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
createIndex(sourceIndex);
|
||||
|
||||
String id = "test-crud";
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
|
||||
client::putDataFrameTransformAsync);
|
||||
TransformClient client = highLevelClient().transform();
|
||||
AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
|
||||
client::putTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
|
||||
ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
|
||||
client::deleteDataFrameTransformAsync);
|
||||
ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
|
||||
client::deleteTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
|
||||
// The second delete should fail
|
||||
ElasticsearchStatusException deleteError = expectThrows(ElasticsearchStatusException.class,
|
||||
() -> execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
|
||||
client::deleteDataFrameTransformAsync));
|
||||
() -> execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
|
||||
client::deleteTransformAsync));
|
||||
assertThat(deleteError.getMessage(), containsString("Transform with id [test-crud] could not be found"));
|
||||
}
|
||||
|
||||
|
@ -188,25 +188,25 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
createIndex(sourceIndex);
|
||||
|
||||
String id = "test-update";
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest")
|
||||
TransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest")
|
||||
.setSyncConfig(new TimeSyncConfig("timefield", TimeValue.timeValueSeconds(60)))
|
||||
.build();
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
|
||||
client::putDataFrameTransformAsync);
|
||||
TransformClient client = highLevelClient().transform();
|
||||
AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
|
||||
client::putTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
|
||||
String updatedDescription = "my new description";
|
||||
DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate.builder().setDescription(updatedDescription).build();
|
||||
UpdateDataFrameTransformResponse response = execute(
|
||||
new UpdateDataFrameTransformRequest(update, id), client::updateDataFrameTransform,
|
||||
client::updateDataFrameTransformAsync);
|
||||
TransformConfigUpdate update = TransformConfigUpdate.builder().setDescription(updatedDescription).build();
|
||||
UpdateTransformResponse response = execute(
|
||||
new UpdateTransformRequest(update, id), client::updateTransform,
|
||||
client::updateTransformAsync);
|
||||
assertThat(response.getTransformConfiguration().getDescription(), equalTo(updatedDescription));
|
||||
|
||||
ElasticsearchStatusException updateError = expectThrows(ElasticsearchStatusException.class,
|
||||
() -> execute(new UpdateDataFrameTransformRequest(update, "missing-transform"), client::updateDataFrameTransform,
|
||||
client::updateDataFrameTransformAsync));
|
||||
() -> execute(new UpdateTransformRequest(update, "missing-transform"), client::updateTransform,
|
||||
client::updateTransformAsync));
|
||||
assertThat(updateError.getMessage(), containsString("Transform with id [missing-transform] could not be found"));
|
||||
}
|
||||
|
||||
|
@ -214,15 +214,15 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
String sourceIndex = "missing-source-index";
|
||||
|
||||
String id = "test-with-defer";
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(transform);
|
||||
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
TransformClient client = highLevelClient().transform();
|
||||
PutTransformRequest request = new PutTransformRequest(transform);
|
||||
request.setDeferValidation(true);
|
||||
AcknowledgedResponse ack = execute(request, client::putDataFrameTransform, client::putDataFrameTransformAsync);
|
||||
AcknowledgedResponse ack = execute(request, client::putTransform, client::putTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
|
||||
ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
|
||||
client::deleteDataFrameTransformAsync);
|
||||
ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
|
||||
client::deleteTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
}
|
||||
|
||||
|
@ -231,14 +231,14 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
createIndex(sourceIndex);
|
||||
|
||||
String id = "test-get";
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
TransformClient client = highLevelClient().transform();
|
||||
putTransform(transform);
|
||||
|
||||
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest(id);
|
||||
GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
|
||||
client::getDataFrameTransformAsync);
|
||||
GetTransformRequest getRequest = new GetTransformRequest(id);
|
||||
GetTransformResponse getResponse = execute(getRequest, client::getTransform,
|
||||
client::getTransformAsync);
|
||||
assertNull(getResponse.getInvalidTransforms());
|
||||
assertThat(getResponse.getTransformConfigurations(), hasSize(1));
|
||||
assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(0).getId());
|
||||
|
@ -248,40 +248,40 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
String sourceIndex = "transform-source";
|
||||
createIndex(sourceIndex);
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
TransformClient client = highLevelClient().transform();
|
||||
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
|
||||
TransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
|
||||
putTransform(transform);
|
||||
|
||||
transform = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2");
|
||||
putTransform(transform);
|
||||
|
||||
GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("_all");
|
||||
GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
|
||||
client::getDataFrameTransformAsync);
|
||||
GetTransformRequest getRequest = new GetTransformRequest("_all");
|
||||
GetTransformResponse getResponse = execute(getRequest, client::getTransform,
|
||||
client::getTransformAsync);
|
||||
assertNull(getResponse.getInvalidTransforms());
|
||||
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
|
||||
assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(1).getId());
|
||||
|
||||
getRequest.setPageParams(new PageParams(0,1));
|
||||
getResponse = execute(getRequest, client::getDataFrameTransform,
|
||||
client::getDataFrameTransformAsync);
|
||||
getResponse = execute(getRequest, client::getTransform,
|
||||
client::getTransformAsync);
|
||||
assertNull(getResponse.getInvalidTransforms());
|
||||
assertThat(getResponse.getTransformConfigurations(), hasSize(1));
|
||||
|
||||
GetDataFrameTransformRequest getMulitple = new GetDataFrameTransformRequest("test-get-all-1", "test-get-all-2");
|
||||
getResponse = execute(getMulitple, client::getDataFrameTransform,
|
||||
client::getDataFrameTransformAsync);
|
||||
GetTransformRequest getMulitple = new GetTransformRequest("test-get-all-1", "test-get-all-2");
|
||||
getResponse = execute(getMulitple, client::getTransform,
|
||||
client::getTransformAsync);
|
||||
assertNull(getResponse.getInvalidTransforms());
|
||||
assertThat(getResponse.getTransformConfigurations(), hasSize(2));
|
||||
}
|
||||
|
||||
public void testGetMissingTransform() {
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
TransformClient client = highLevelClient().transform();
|
||||
|
||||
ElasticsearchStatusException missingError = expectThrows(ElasticsearchStatusException.class,
|
||||
() -> execute(new GetDataFrameTransformRequest("unknown"), client::getDataFrameTransform,
|
||||
client::getDataFrameTransformAsync));
|
||||
() -> execute(new GetTransformRequest("unknown"), client::getTransform,
|
||||
client::getTransformAsync));
|
||||
assertThat(missingError.status(), equalTo(RestStatus.NOT_FOUND));
|
||||
}
|
||||
|
||||
|
@ -290,39 +290,39 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
createIndex(sourceIndex);
|
||||
|
||||
String id = "test-stop-start";
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
TransformClient client = highLevelClient().transform();
|
||||
putTransform(transform);
|
||||
|
||||
StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
|
||||
StartDataFrameTransformResponse startResponse =
|
||||
execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync);
|
||||
StartTransformRequest startRequest = new StartTransformRequest(id);
|
||||
StartTransformResponse startResponse =
|
||||
execute(startRequest, client::startTransform, client::startTransformAsync);
|
||||
assertTrue(startResponse.isAcknowledged());
|
||||
assertThat(startResponse.getNodeFailures(), empty());
|
||||
assertThat(startResponse.getTaskFailures(), empty());
|
||||
|
||||
GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
|
||||
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
|
||||
GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
|
||||
client::getTransformStats, client::getTransformStatsAsync);
|
||||
assertThat(statsResponse.getTransformsStats(), hasSize(1));
|
||||
DataFrameTransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
|
||||
TransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
|
||||
|
||||
// Since we are non-continuous, the transform could auto-stop between being started earlier and us gathering the statistics
|
||||
assertThat(taskState, oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING,
|
||||
DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED));
|
||||
assertThat(taskState, oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
|
||||
TransformStats.State.STOPPING, TransformStats.State.STOPPED));
|
||||
|
||||
StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null);
|
||||
StopDataFrameTransformResponse stopResponse =
|
||||
execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync);
|
||||
StopTransformRequest stopRequest = new StopTransformRequest(id, Boolean.TRUE, null);
|
||||
StopTransformResponse stopResponse =
|
||||
execute(stopRequest, client::stopTransform, client::stopTransformAsync);
|
||||
assertTrue(stopResponse.isAcknowledged());
|
||||
assertThat(stopResponse.getNodeFailures(), empty());
|
||||
assertThat(stopResponse.getTaskFailures(), empty());
|
||||
|
||||
// Calling stop with wait_for_completion assures that we will be in the `STOPPED` state for the transform task
|
||||
statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
|
||||
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
|
||||
statsResponse = execute(new GetTransformStatsRequest(id),
|
||||
client::getTransformStats, client::getTransformStatsAsync);
|
||||
taskState = statsResponse.getTransformsStats().get(0).getState();
|
||||
assertThat(taskState, is(DataFrameTransformStats.State.STOPPED));
|
||||
assertThat(taskState, is(TransformStats.State.STOPPED));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -331,12 +331,12 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
createIndex(sourceIndex);
|
||||
indexData(sourceIndex);
|
||||
|
||||
DataFrameTransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
|
||||
TransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
PreviewDataFrameTransformResponse preview = execute(new PreviewDataFrameTransformRequest(transform),
|
||||
client::previewDataFrameTransform,
|
||||
client::previewDataFrameTransformAsync);
|
||||
TransformClient client = highLevelClient().transform();
|
||||
PreviewTransformResponse preview = execute(new PreviewTransformRequest(transform),
|
||||
client::previewTransform,
|
||||
client::previewTransformAsync);
|
||||
|
||||
List<Map<String, Object>> docs = preview.getDocs();
|
||||
assertThat(docs, hasSize(2));
|
||||
|
@ -355,11 +355,11 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(fields.get("avg_rating"), equalTo(Collections.singletonMap("type", "double")));
|
||||
}
|
||||
|
||||
private DataFrameTransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
|
||||
private TransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
|
||||
return validDataFrameTransformConfigBuilder(id, source, destination).build();
|
||||
}
|
||||
|
||||
private DataFrameTransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) {
|
||||
private TransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) {
|
||||
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
|
||||
TermsGroupSource.builder().setField("user_id").build()).build();
|
||||
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
|
||||
|
@ -368,7 +368,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
DestConfig destConfig = (destination != null) ? DestConfig.builder().setIndex(destination).build() : null;
|
||||
|
||||
return DataFrameTransformConfig.builder()
|
||||
return TransformConfig.builder()
|
||||
.setId(id)
|
||||
.setSource(SourceConfig.builder().setIndex(source).setQuery(new MatchAllQueryBuilder()).build())
|
||||
.setDest(destConfig)
|
||||
|
@ -389,7 +389,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
|
||||
|
||||
String id = "test-get-stats";
|
||||
DataFrameTransformConfig transform = DataFrameTransformConfig.builder()
|
||||
TransformConfig transform = TransformConfig.builder()
|
||||
.setId(id)
|
||||
.setSource(SourceConfig.builder().setIndex(sourceIndex).setQuery(new MatchAllQueryBuilder()).build())
|
||||
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
|
||||
|
@ -397,17 +397,17 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
.setDescription("transform for testing stats")
|
||||
.build();
|
||||
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
TransformClient client = highLevelClient().transform();
|
||||
putTransform(transform);
|
||||
|
||||
GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
|
||||
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
|
||||
GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
|
||||
client::getTransformStats, client::getTransformStatsAsync);
|
||||
|
||||
assertEquals(1, statsResponse.getTransformsStats().size());
|
||||
DataFrameTransformStats stats = statsResponse.getTransformsStats().get(0);
|
||||
assertEquals(DataFrameTransformStats.State.STOPPED, stats.getState());
|
||||
TransformStats stats = statsResponse.getTransformsStats().get(0);
|
||||
assertEquals(TransformStats.State.STOPPED, stats.getState());
|
||||
|
||||
DataFrameIndexerTransformStats zeroIndexerStats = new DataFrameIndexerTransformStats(
|
||||
TransformIndexerStats zeroIndexerStats = new TransformIndexerStats(
|
||||
0L,
|
||||
0L,
|
||||
0L,
|
||||
|
@ -424,25 +424,25 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals(zeroIndexerStats, stats.getIndexerStats());
|
||||
|
||||
// start the transform
|
||||
StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id),
|
||||
client::startDataFrameTransform,
|
||||
client::startDataFrameTransformAsync);
|
||||
StartTransformResponse startTransformResponse = execute(new StartTransformRequest(id),
|
||||
client::startTransform,
|
||||
client::startTransformAsync);
|
||||
assertThat(startTransformResponse.isAcknowledged(), is(true));
|
||||
assertBusy(() -> {
|
||||
GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id),
|
||||
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
|
||||
DataFrameTransformStats stateAndStats = response.getTransformsStats().get(0);
|
||||
GetTransformStatsResponse response = execute(new GetTransformStatsRequest(id),
|
||||
client::getTransformStats, client::getTransformStatsAsync);
|
||||
TransformStats stateAndStats = response.getTransformsStats().get(0);
|
||||
assertNotEquals(zeroIndexerStats, stateAndStats.getIndexerStats());
|
||||
assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING,
|
||||
DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED));
|
||||
assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
|
||||
TransformStats.State.STOPPING, TransformStats.State.STOPPED));
|
||||
assertThat(stateAndStats.getReason(), is(nullValue()));
|
||||
});
|
||||
}
|
||||
|
||||
void putTransform(DataFrameTransformConfig config) throws IOException {
|
||||
DataFrameClient client = highLevelClient().dataFrame();
|
||||
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(config), client::putDataFrameTransform,
|
||||
client::putDataFrameTransformAsync);
|
||||
void putTransform(TransformConfig config) throws IOException {
|
||||
TransformClient client = highLevelClient().transform();
|
||||
AcknowledgedResponse ack = execute(new PutTransformRequest(config), client::putTransform,
|
||||
client::putTransformAsync);
|
||||
assertTrue(ack.isAcknowledged());
|
||||
transformsToClean.add(config.getId());
|
||||
}
|
|
@ -28,25 +28,25 @@ import org.elasticsearch.client.core.AcknowledgedResponse;
|
|||
import org.elasticsearch.client.core.PageParams;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
import org.elasticsearch.client.indices.CreateIndexResponse;
|
||||
import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
|
||||
import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformProgress;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
|
||||
import org.elasticsearch.client.transform.DeleteTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformResponse;
|
||||
import org.elasticsearch.client.transform.GetTransformStatsRequest;
|
||||
import org.elasticsearch.client.transform.GetTransformStatsResponse;
|
||||
import org.elasticsearch.client.transform.PreviewTransformRequest;
|
||||
import org.elasticsearch.client.transform.PreviewTransformResponse;
|
||||
import org.elasticsearch.client.transform.PutTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartTransformRequest;
|
||||
import org.elasticsearch.client.transform.StartTransformResponse;
|
||||
import org.elasticsearch.client.transform.StopTransformRequest;
|
||||
import org.elasticsearch.client.transform.StopTransformResponse;
|
||||
import org.elasticsearch.client.transform.UpdateTransformRequest;
|
||||
import org.elasticsearch.client.transform.UpdateTransformResponse;
|
||||
import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.TransformProgress;
|
||||
import org.elasticsearch.client.transform.transforms.TransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.DestConfig;
|
||||
import org.elasticsearch.client.transform.transforms.NodeAttributes;
|
||||
import org.elasticsearch.client.transform.transforms.QueryConfig;
|
||||
|
@ -73,20 +73,20 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
|||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
|
||||
public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
public class TransformDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
private List<String> transformsToClean = new ArrayList<>();
|
||||
|
||||
@After
|
||||
public void cleanUpTransforms() throws Exception {
|
||||
for (String transformId : transformsToClean) {
|
||||
highLevelClient().dataFrame().stopDataFrameTransform(
|
||||
new StopDataFrameTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT);
|
||||
highLevelClient().transform().stopTransform(
|
||||
new StopTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT);
|
||||
}
|
||||
|
||||
for (String transformId : transformsToClean) {
|
||||
highLevelClient().dataFrame().deleteDataFrameTransform(
|
||||
new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT);
|
||||
highLevelClient().transform().deleteTransform(
|
||||
new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
|
||||
}
|
||||
|
||||
transformsToClean = new ArrayList<>();
|
||||
|
@ -116,7 +116,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
assertTrue(response.isAcknowledged());
|
||||
}
|
||||
|
||||
public void testPutDataFrameTransform() throws IOException, InterruptedException {
|
||||
public void testPutTransform() throws IOException, InterruptedException {
|
||||
createIndex("source-index");
|
||||
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
@ -154,7 +154,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
.build();
|
||||
// end::put-transform-pivot-config
|
||||
// tag::put-transform-config
|
||||
DataFrameTransformConfig transformConfig = DataFrameTransformConfig
|
||||
TransformConfig transformConfig = TransformConfig
|
||||
.builder()
|
||||
.setId("reviewer-avg-rating") // <1>
|
||||
.setSource(sourceConfig) // <2>
|
||||
|
@ -167,14 +167,14 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
|
||||
{
|
||||
// tag::put-transform-request
|
||||
PutDataFrameTransformRequest request =
|
||||
new PutDataFrameTransformRequest(transformConfig); // <1>
|
||||
PutTransformRequest request =
|
||||
new PutTransformRequest(transformConfig); // <1>
|
||||
request.setDeferValidation(false); // <2>
|
||||
// end::put-transform-request
|
||||
|
||||
// tag::put-transform-execute
|
||||
AcknowledgedResponse response =
|
||||
client.dataFrame().putDataFrameTransform(
|
||||
client.transform().putTransform(
|
||||
request, RequestOptions.DEFAULT);
|
||||
// end::put-transform-execute
|
||||
transformsToClean.add(request.getConfig().getId());
|
||||
|
@ -182,13 +182,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
assertTrue(response.isAcknowledged());
|
||||
}
|
||||
{
|
||||
DataFrameTransformConfig configWithDifferentId = DataFrameTransformConfig.builder()
|
||||
TransformConfig configWithDifferentId = TransformConfig.builder()
|
||||
.setId("reviewer-avg-rating2")
|
||||
.setSource(transformConfig.getSource())
|
||||
.setDest(transformConfig.getDestination())
|
||||
.setPivotConfig(transformConfig.getPivotConfig())
|
||||
.build();
|
||||
PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(configWithDifferentId);
|
||||
PutTransformRequest request = new PutTransformRequest(configWithDifferentId);
|
||||
|
||||
// tag::put-transform-execute-listener
|
||||
ActionListener<AcknowledgedResponse> listener =
|
||||
|
@ -210,7 +210,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::put-transform-execute-async
|
||||
client.dataFrame().putDataFrameTransformAsync(
|
||||
client.transform().putTransformAsync(
|
||||
request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::put-transform-execute-async
|
||||
|
||||
|
@ -231,7 +231,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
|
||||
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
|
||||
|
||||
DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
|
||||
TransformConfig transformConfig = TransformConfig.builder()
|
||||
.setId("my-transform-to-update")
|
||||
.setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
|
||||
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
|
||||
|
@ -239,11 +239,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
.setSyncConfig(new TimeSyncConfig("time-field", TimeValue.timeValueSeconds(120)))
|
||||
.build();
|
||||
|
||||
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
|
||||
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
|
||||
transformsToClean.add(transformConfig.getId());
|
||||
|
||||
// tag::update-transform-config
|
||||
DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate
|
||||
TransformConfigUpdate update = TransformConfigUpdate
|
||||
.builder()
|
||||
.setSource(SourceConfig.builder()
|
||||
.setIndex("source-data")
|
||||
|
@ -260,32 +260,32 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
|
||||
{
|
||||
// tag::update-transform-request
|
||||
UpdateDataFrameTransformRequest request =
|
||||
new UpdateDataFrameTransformRequest(
|
||||
UpdateTransformRequest request =
|
||||
new UpdateTransformRequest(
|
||||
update, // <1>
|
||||
"my-transform-to-update"); // <2>
|
||||
request.setDeferValidation(false); // <3>
|
||||
// end::update-transform-request
|
||||
|
||||
// tag::update-transform-execute
|
||||
UpdateDataFrameTransformResponse response =
|
||||
client.dataFrame().updateDataFrameTransform(request,
|
||||
UpdateTransformResponse response =
|
||||
client.transform().updateTransform(request,
|
||||
RequestOptions.DEFAULT);
|
||||
DataFrameTransformConfig updatedConfig =
|
||||
TransformConfig updatedConfig =
|
||||
response.getTransformConfiguration();
|
||||
// end::update-transform-execute
|
||||
|
||||
assertThat(updatedConfig.getDescription(), equalTo("This is my updated transform"));
|
||||
}
|
||||
{
|
||||
UpdateDataFrameTransformRequest request = new UpdateDataFrameTransformRequest(update,
|
||||
UpdateTransformRequest request = new UpdateTransformRequest(update,
|
||||
"my-transform-to-update");
|
||||
|
||||
// tag::update-transform-execute-listener
|
||||
ActionListener<UpdateDataFrameTransformResponse> listener =
|
||||
new ActionListener<UpdateDataFrameTransformResponse>() {
|
||||
ActionListener<UpdateTransformResponse> listener =
|
||||
new ActionListener<UpdateTransformResponse>() {
|
||||
@Override
|
||||
public void onResponse(UpdateDataFrameTransformResponse response) {
|
||||
public void onResponse(UpdateTransformResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
|
@ -301,7 +301,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::update-transform-execute-async
|
||||
client.dataFrame().updateDataFrameTransformAsync(
|
||||
client.transform().updateTransformAsync(
|
||||
request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::update-transform-execute-async
|
||||
|
||||
|
@ -322,20 +322,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
|
||||
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
|
||||
|
||||
DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
|
||||
TransformConfig transformConfig = TransformConfig.builder()
|
||||
.setId("mega-transform")
|
||||
.setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
|
||||
.setDest(DestConfig.builder().setIndex("pivot-dest").build())
|
||||
.setPivotConfig(pivotConfig)
|
||||
.build();
|
||||
|
||||
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
|
||||
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
|
||||
transformsToClean.add(transformConfig.getId());
|
||||
|
||||
{
|
||||
// tag::start-transform-request
|
||||
StartDataFrameTransformRequest request =
|
||||
new StartDataFrameTransformRequest("mega-transform"); // <1>
|
||||
StartTransformRequest request =
|
||||
new StartTransformRequest("mega-transform"); // <1>
|
||||
// end::start-transform-request
|
||||
|
||||
// tag::start-transform-request-options
|
||||
|
@ -343,8 +343,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
// end::start-transform-request-options
|
||||
|
||||
// tag::start-transform-execute
|
||||
StartDataFrameTransformResponse response =
|
||||
client.dataFrame().startDataFrameTransform(
|
||||
StartTransformResponse response =
|
||||
client.transform().startTransform(
|
||||
request, RequestOptions.DEFAULT);
|
||||
// end::start-transform-execute
|
||||
|
||||
|
@ -352,8 +352,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
}
|
||||
{
|
||||
// tag::stop-transform-request
|
||||
StopDataFrameTransformRequest request =
|
||||
new StopDataFrameTransformRequest("mega-transform"); // <1>
|
||||
StopTransformRequest request =
|
||||
new StopTransformRequest("mega-transform"); // <1>
|
||||
// end::stop-transform-request
|
||||
|
||||
// tag::stop-transform-request-options
|
||||
|
@ -363,8 +363,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
// end::stop-transform-request-options
|
||||
|
||||
// tag::stop-transform-execute
|
||||
StopDataFrameTransformResponse response =
|
||||
client.dataFrame().stopDataFrameTransform(
|
||||
StopTransformResponse response =
|
||||
client.transform().stopTransform(
|
||||
request, RequestOptions.DEFAULT);
|
||||
// end::stop-transform-execute
|
||||
|
||||
|
@ -372,11 +372,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
}
|
||||
{
|
||||
// tag::start-transform-execute-listener
|
||||
ActionListener<StartDataFrameTransformResponse> listener =
|
||||
new ActionListener<StartDataFrameTransformResponse>() {
|
||||
ActionListener<StartTransformResponse> listener =
|
||||
new ActionListener<StartTransformResponse>() {
|
||||
@Override
|
||||
public void onResponse(
|
||||
StartDataFrameTransformResponse response) {
|
||||
StartTransformResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
|
@ -391,9 +391,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("mega-transform");
|
||||
StartTransformRequest request = new StartTransformRequest("mega-transform");
|
||||
// tag::start-transform-execute-async
|
||||
client.dataFrame().startDataFrameTransformAsync(
|
||||
client.transform().startTransformAsync(
|
||||
request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::start-transform-execute-async
|
||||
|
||||
|
@ -401,11 +401,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
}
|
||||
{
|
||||
// tag::stop-transform-execute-listener
|
||||
ActionListener<StopDataFrameTransformResponse> listener =
|
||||
new ActionListener<StopDataFrameTransformResponse>() {
|
||||
ActionListener<StopTransformResponse> listener =
|
||||
new ActionListener<StopTransformResponse>() {
|
||||
@Override
|
||||
public void onResponse(
|
||||
StopDataFrameTransformResponse response) {
|
||||
StopTransformResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
|
@ -420,9 +420,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
|
|||
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("mega-transform");
StopTransformRequest request = new StopTransformRequest("mega-transform");

// tag::stop-transform-execute-async
client.dataFrame().stopDataFrameTransformAsync(
client.transform().stopTransformAsync(
    request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-transform-execute-async

@@ -442,7 +442,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();

DataFrameTransformConfig transformConfig1 = DataFrameTransformConfig.builder()
TransformConfig transformConfig1 = TransformConfig.builder()
    .setId("mega-transform")
    .setSource(SourceConfig.builder()
        .setIndex("source-data")
@@ -451,7 +451,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
    .setDest(DestConfig.builder().setIndex("pivot-dest").build())
    .setPivotConfig(pivotConfig)
    .build();
DataFrameTransformConfig transformConfig2 = DataFrameTransformConfig.builder()
TransformConfig transformConfig2 = TransformConfig.builder()
    .setId("mega-transform2")
    .setSource(SourceConfig.builder()
        .setIndex("source-data")
@@ -461,20 +461,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
    .setPivotConfig(pivotConfig)
    .build();

client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig1), RequestOptions.DEFAULT);
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig2), RequestOptions.DEFAULT);
client.transform().putTransform(new PutTransformRequest(transformConfig1), RequestOptions.DEFAULT);
client.transform().putTransform(new PutTransformRequest(transformConfig2), RequestOptions.DEFAULT);

{
// tag::delete-transform-request
DeleteDataFrameTransformRequest request =
    new DeleteDataFrameTransformRequest("mega-transform"); // <1>
DeleteTransformRequest request =
    new DeleteTransformRequest("mega-transform"); // <1>
request.setForce(false); // <2>
// end::delete-transform-request

// tag::delete-transform-execute
AcknowledgedResponse response =
    client.dataFrame()
        .deleteDataFrameTransform(request, RequestOptions.DEFAULT);
    client.transform()
        .deleteTransform(request, RequestOptions.DEFAULT);
// end::delete-transform-execute

assertTrue(response.isAcknowledged());

@@ -499,10 +499,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

DeleteDataFrameTransformRequest request = new DeleteDataFrameTransformRequest("mega-transform2");
DeleteTransformRequest request = new DeleteTransformRequest("mega-transform2");

// tag::delete-transform-execute-async
client.dataFrame().deleteDataFrameTransformAsync(
client.transform().deleteTransformAsync(
    request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-transform-execute-async
@@ -524,23 +524,23 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();

// tag::preview-transform-request
DataFrameTransformConfig transformConfig =
    DataFrameTransformConfig.forPreview(
TransformConfig transformConfig =
    TransformConfig.forPreview(
        SourceConfig.builder()
            .setIndex("source-data")
            .setQueryConfig(queryConfig)
            .build(), // <1>
        pivotConfig); // <2>

PreviewDataFrameTransformRequest request =
    new PreviewDataFrameTransformRequest(transformConfig); // <3>
PreviewTransformRequest request =
    new PreviewTransformRequest(transformConfig); // <3>
// end::preview-transform-request

{
// tag::preview-transform-execute
PreviewDataFrameTransformResponse response =
    client.dataFrame()
        .previewDataFrameTransform(request, RequestOptions.DEFAULT);
PreviewTransformResponse response =
    client.transform()
        .previewTransform(request, RequestOptions.DEFAULT);
// end::preview-transform-execute

assertNotNull(response.getDocs());

@@ -548,10 +548,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
}
{
// tag::preview-transform-execute-listener
ActionListener<PreviewDataFrameTransformResponse> listener =
    new ActionListener<PreviewDataFrameTransformResponse>() {
ActionListener<PreviewTransformResponse> listener =
    new ActionListener<PreviewTransformResponse>() {
        @Override
        public void onResponse(PreviewDataFrameTransformResponse response) {
        public void onResponse(PreviewTransformResponse response) {
            // <1>
        }

@@ -567,7 +567,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
listener = new LatchedActionListener<>(listener, latch);

// tag::preview-transform-execute-async
client.dataFrame().previewDataFrameTransformAsync(
client.transform().previewTransformAsync(
    request, RequestOptions.DEFAULT, listener); // <1>
// end::preview-transform-execute-async
@@ -588,7 +588,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();

String id = "statisitcal-transform";
DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
TransformConfig transformConfig = TransformConfig.builder()
    .setId(id)
    .setSource(SourceConfig.builder()
        .setIndex("source-data")
@@ -597,12 +597,12 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
    .setDest(DestConfig.builder().setIndex("pivot-dest").build())
    .setPivotConfig(pivotConfig)
    .build();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(id);

// tag::get-transform-stats-request
GetDataFrameTransformStatsRequest request =
    new GetDataFrameTransformStatsRequest(id); // <1>
GetTransformStatsRequest request =
    new GetTransformStatsRequest(id); // <1>
// end::get-transform-stats-request

// tag::get-transform-stats-request-options

@@ -612,38 +612,38 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest

{
// tag::get-transform-stats-execute
GetDataFrameTransformStatsResponse response =
    client.dataFrame()
        .getDataFrameTransformStats(request, RequestOptions.DEFAULT);
GetTransformStatsResponse response =
    client.transform()
        .getTransformStats(request, RequestOptions.DEFAULT);
// end::get-transform-stats-execute

assertThat(response.getTransformsStats(), hasSize(1));

// tag::get-transform-stats-response
DataFrameTransformStats stats =
TransformStats stats =
    response.getTransformsStats().get(0); // <1>
DataFrameTransformStats.State state =
TransformStats.State state =
    stats.getState(); // <2>
DataFrameIndexerTransformStats indexerStats =
TransformIndexerStats indexerStats =
    stats.getIndexerStats(); // <3>
DataFrameTransformProgress progress =
TransformProgress progress =
    stats.getCheckpointingInfo()
        .getNext().getCheckpointProgress(); // <4>
NodeAttributes node =
    stats.getNode(); // <5>
// end::get-transform-stats-response

assertEquals(DataFrameTransformStats.State.STOPPED, state);
assertEquals(TransformStats.State.STOPPED, state);
assertNotNull(indexerStats);
assertNull(progress);
}
{
// tag::get-transform-stats-execute-listener
ActionListener<GetDataFrameTransformStatsResponse> listener =
    new ActionListener<GetDataFrameTransformStatsResponse>() {
ActionListener<GetTransformStatsResponse> listener =
    new ActionListener<GetTransformStatsResponse>() {
        @Override
        public void onResponse(
            GetDataFrameTransformStatsResponse response) {
            GetTransformStatsResponse response) {
            // <1>
        }

@@ -659,7 +659,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
listener = new LatchedActionListener<>(listener, latch);

// tag::get-transform-stats-execute-async
client.dataFrame().getDataFrameTransformStatsAsync(
client.transform().getTransformStatsAsync(
    request, RequestOptions.DEFAULT, listener); // <1>
// end::get-transform-stats-execute-async
@@ -679,7 +679,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();

DataFrameTransformConfig putTransformConfig = DataFrameTransformConfig.builder()
TransformConfig putTransformConfig = TransformConfig.builder()
    .setId("mega-transform")
    .setSource(SourceConfig.builder()
        .setIndex("source-data")
@@ -690,13 +690,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
    .build();

RestHighLevelClient client = highLevelClient();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
client.transform().putTransform(new PutTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
transformsToClean.add(putTransformConfig.getId());

{
// tag::get-transform-request
GetDataFrameTransformRequest request =
    new GetDataFrameTransformRequest("mega-transform"); // <1>
GetTransformRequest request =
    new GetTransformRequest("mega-transform"); // <1>
// end::get-transform-request

// tag::get-transform-request-options
@@ -705,13 +705,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
// end::get-transform-request-options

// tag::get-transform-execute
GetDataFrameTransformResponse response =
    client.dataFrame()
        .getDataFrameTransform(request, RequestOptions.DEFAULT);
GetTransformResponse response =
    client.transform()
        .getTransform(request, RequestOptions.DEFAULT);
// end::get-transform-execute

// tag::get-transform-response
List<DataFrameTransformConfig> transformConfigs =
List<TransformConfig> transformConfigs =
    response.getTransformConfigurations();
// end::get-transform-response

@@ -719,10 +719,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
}
{
// tag::get-transform-execute-listener
ActionListener<GetDataFrameTransformResponse> listener =
    new ActionListener<GetDataFrameTransformResponse>() {
ActionListener<GetTransformResponse> listener =
    new ActionListener<GetTransformResponse>() {
        @Override
        public void onResponse(GetDataFrameTransformResponse response) {
        public void onResponse(GetTransformResponse response) {
            // <1>
        }

@@ -737,10 +737,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);

GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("mega-transform");
GetTransformRequest request = new GetTransformRequest("mega-transform");

// tag::get-transform-execute-async
client.dataFrame().getDataFrameTransformAsync(
client.transform().getTransformAsync(
    request, RequestOptions.DEFAULT, listener); // <1>
// end::get-transform-execute-async
@@ -33,8 +33,4 @@ public class DeleteModelSnapshotRequestTests extends ESTestCase {
    -> new DeleteModelSnapshotRequest(randomAlphaOfLength(10), null));
assertEquals("[snapshot_id] must not be null", ex.getMessage());
}

private DeleteModelSnapshotRequest createTestInstance() {
    return new DeleteModelSnapshotRequest(randomAlphaOfLength(10), randomAlphaOfLength(10));
}
}
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.preprocessing;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
|
||||
public class FrequencyEncodingTests extends AbstractXContentTestCase<FrequencyEncoding> {
|
||||
|
||||
@Override
|
||||
protected FrequencyEncoding doParseInstance(XContentParser parser) throws IOException {
|
||||
return FrequencyEncoding.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> !field.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FrequencyEncoding createTestInstance() {
|
||||
return createRandom();
|
||||
}
|
||||
|
||||
public static FrequencyEncoding createRandom() {
|
||||
int valuesSize = randomIntBetween(1, 10);
|
||||
Map<String, Double> valueMap = new HashMap<>();
|
||||
for (int i = 0; i < valuesSize; i++) {
|
||||
valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false));
|
||||
}
|
||||
return new FrequencyEncoding(randomAlphaOfLength(10), randomAlphaOfLength(10), valueMap);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.preprocessing;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
|
||||
public class OneHotEncodingTests extends AbstractXContentTestCase<OneHotEncoding> {
|
||||
|
||||
@Override
|
||||
protected OneHotEncoding doParseInstance(XContentParser parser) throws IOException {
|
||||
return OneHotEncoding.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> !field.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected OneHotEncoding createTestInstance() {
|
||||
return createRandom();
|
||||
}
|
||||
|
||||
public static OneHotEncoding createRandom() {
|
||||
int valuesSize = randomIntBetween(1, 10);
|
||||
Map<String, String> valueMap = new HashMap<>();
|
||||
for (int i = 0; i < valuesSize; i++) {
|
||||
valueMap.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
|
||||
}
|
||||
return new OneHotEncoding(randomAlphaOfLength(10), valueMap);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.preprocessing;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
|
||||
public class TargetMeanEncodingTests extends AbstractXContentTestCase<TargetMeanEncoding> {
|
||||
|
||||
@Override
|
||||
protected TargetMeanEncoding doParseInstance(XContentParser parser) throws IOException {
|
||||
return TargetMeanEncoding.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> !field.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TargetMeanEncoding createTestInstance() {
|
||||
return createRandom();
|
||||
}
|
||||
|
||||
public static TargetMeanEncoding createRandom() {
|
||||
int valuesSize = randomIntBetween(1, 10);
|
||||
Map<String, Double> valueMap = new HashMap<>();
|
||||
for (int i = 0; i < valuesSize; i++) {
|
||||
valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false));
|
||||
}
|
||||
return new TargetMeanEncoding(randomAlphaOfLength(10),
|
||||
randomAlphaOfLength(10),
|
||||
valueMap,
|
||||
randomDoubleBetween(0.0, 1.0, false));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
|
||||
|
||||
import org.elasticsearch.client.ml.job.config.Operator;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class TreeNodeTests extends AbstractXContentTestCase<TreeNode> {
|
||||
|
||||
@Override
|
||||
protected TreeNode doParseInstance(XContentParser parser) throws IOException {
|
||||
return TreeNode.fromXContent(parser).build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TreeNode createTestInstance() {
|
||||
Integer lft = randomBoolean() ? null : randomInt(100);
|
||||
Integer rgt = randomBoolean() ? randomInt(100) : null;
|
||||
Double threshold = lft != null || randomBoolean() ? randomDouble() : null;
|
||||
Integer featureIndex = lft != null || randomBoolean() ? randomInt(100) : null;
|
||||
return createRandom(randomInt(), lft, rgt, threshold, featureIndex, randomBoolean() ? null : randomFrom(Operator.values())).build();
|
||||
}
|
||||
|
||||
public static TreeNode createRandomLeafNode(double internalValue) {
|
||||
return TreeNode.builder(randomInt(100))
|
||||
.setDefaultLeft(randomBoolean() ? null : randomBoolean())
|
||||
.setLeafValue(internalValue)
|
||||
.build();
|
||||
}
|
||||
|
||||
public static TreeNode.Builder createRandom(int nodeIndex,
|
||||
Integer left,
|
||||
Integer right,
|
||||
Double threshold,
|
||||
Integer featureIndex,
|
||||
Operator operator) {
|
||||
return TreeNode.builder(nodeIndex)
|
||||
.setLeafValue(left == null ? randomDouble() : null)
|
||||
.setDefaultLeft(randomBoolean() ? null : randomBoolean())
|
||||
.setLeftChild(left)
|
||||
.setRightChild(right)
|
||||
.setThreshold(threshold)
|
||||
.setOperator(operator)
|
||||
.setSplitFeature(featureIndex)
|
||||
.setSplitGain(randomBoolean() ? null : randomDouble());
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
|
||||
public class TreeTests extends AbstractXContentTestCase<Tree> {
|
||||
|
||||
@Override
|
||||
protected Tree doParseInstance(XContentParser parser) throws IOException {
|
||||
return Tree.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Predicate<String> getRandomFieldsExcludeFilter() {
|
||||
return field -> field.startsWith("feature_names");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tree createTestInstance() {
|
||||
return createRandom();
|
||||
}
|
||||
|
||||
public static Tree createRandom() {
|
||||
return buildRandomTree(randomIntBetween(2, 15), 6);
|
||||
}
|
||||
|
||||
public static Tree buildRandomTree(int numFeatures, int depth) {
|
||||
|
||||
Tree.Builder builder = Tree.builder();
|
||||
List<String> featureNames = new ArrayList<>(numFeatures);
|
||||
for(int i = 0; i < numFeatures; i++) {
|
||||
featureNames.add(randomAlphaOfLength(10));
|
||||
}
|
||||
builder.setFeatureNames(featureNames);
|
||||
|
||||
TreeNode.Builder node = builder.addJunction(0, randomInt(numFeatures), true, randomDouble());
|
||||
List<Integer> childNodes = Arrays.asList(node.getLeftChild(), node.getRightChild());
|
||||
|
||||
for (int i = 0; i < depth -1; i++) {
|
||||
|
||||
List<Integer> nextNodes = new ArrayList<>();
|
||||
for (int nodeId : childNodes) {
|
||||
if (i == depth -2) {
|
||||
builder.addLeaf(nodeId, randomDouble());
|
||||
} else {
|
||||
TreeNode.Builder childNode =
|
||||
builder.addJunction(nodeId, randomInt(numFeatures), true, randomDouble());
|
||||
nextNodes.add(childNode.getLeftChild());
|
||||
nextNodes.add(childNode.getRightChild());
|
||||
}
|
||||
}
|
||||
childNodes = nextNodes;
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
}
|
|
@@ -23,11 +23,11 @@ import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.containsString;

public class DeleteDataFrameTransformRequestTests extends ESTestCase {
public class DeleteTransformRequestTests extends ESTestCase {

    public void testValidate() {
        assertFalse(new DeleteDataFrameTransformRequest("valid-id").validate().isPresent());
        assertThat(new DeleteDataFrameTransformRequest(null).validate().get().getMessage(),
            containsString("data frame transform id must not be null"));
        assertFalse(new DeleteTransformRequest("valid-id").validate().isPresent());
        assertThat(new DeleteTransformRequest(null).validate().get().getMessage(),
            containsString("transform id must not be null"));
    }
}
@@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.containsString;

public class GetDataFrameTransformRequestTests extends ESTestCase {
public class GetTransformRequestTests extends ESTestCase {
    public void testValidate() {
        assertFalse(new GetDataFrameTransformRequest("valid-id").validate().isPresent());
        assertThat(new GetDataFrameTransformRequest(new String[0]).validate().get().getMessage(),
            containsString("data frame transform id must not be null"));
        assertFalse(new GetTransformRequest("valid-id").validate().isPresent());
        assertThat(new GetTransformRequest(new String[0]).validate().get().getMessage(),
            containsString("transform id must not be null"));
    }
}
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -35,32 +35,32 @@ import java.util.List;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class GetDataFrameTransformResponseTests extends ESTestCase {
|
||||
public class GetTransformResponseTests extends ESTestCase {
|
||||
|
||||
public void testXContentParser() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
GetDataFrameTransformResponseTests::createTestInstance,
|
||||
GetDataFrameTransformResponseTests::toXContent,
|
||||
GetDataFrameTransformResponse::fromXContent)
|
||||
GetTransformResponseTests::createTestInstance,
|
||||
GetTransformResponseTests::toXContent,
|
||||
GetTransformResponse::fromXContent)
|
||||
.supportsUnknownFields(false)
|
||||
.test();
|
||||
}
|
||||
|
||||
private static GetDataFrameTransformResponse createTestInstance() {
|
||||
private static GetTransformResponse createTestInstance() {
|
||||
int numTransforms = randomIntBetween(0, 3);
|
||||
List<DataFrameTransformConfig> transforms = new ArrayList<>();
|
||||
List<TransformConfig> transforms = new ArrayList<>();
|
||||
for (int i=0; i<numTransforms; i++) {
|
||||
transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
transforms.add(TransformConfigTests.randomTransformConfig());
|
||||
}
|
||||
GetDataFrameTransformResponse.InvalidTransforms invalidTransforms = null;
|
||||
GetTransformResponse.InvalidTransforms invalidTransforms = null;
|
||||
if (randomBoolean()) {
|
||||
List<String> invalidIds = Arrays.asList(generateRandomStringArray(5, 6, false, false));
|
||||
invalidTransforms = new GetDataFrameTransformResponse.InvalidTransforms(invalidIds);
|
||||
invalidTransforms = new GetTransformResponse.InvalidTransforms(invalidIds);
|
||||
}
|
||||
return new GetDataFrameTransformResponse(transforms, transforms.size() + 10, invalidTransforms);
|
||||
return new GetTransformResponse(transforms, transforms.size() + 10, invalidTransforms);
|
||||
}
|
||||
|
||||
private static void toXContent(GetDataFrameTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
private static void toXContent(GetTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field("count", response.getCount());
|
||||
|
@ -79,7 +79,7 @@ public class GetDataFrameTransformResponseTests extends ESTestCase {
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
|
@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase;
|
|||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class GetDataFrameTransformStatsRequestTests extends ESTestCase {
|
||||
public class GetTransformStatsRequestTests extends ESTestCase {
|
||||
public void testValidate() {
|
||||
assertFalse(new GetDataFrameTransformStatsRequest("valid-id").validate().isPresent());
|
||||
assertThat(new GetDataFrameTransformStatsRequest(null).validate().get().getMessage(),
|
||||
containsString("data frame transform id must not be null"));
|
||||
assertFalse(new GetTransformStatsRequest("valid-id").validate().isPresent());
|
||||
assertThat(new GetTransformStatsRequest(null).validate().get().getMessage(),
|
||||
containsString("transform id must not be null"));
|
||||
}
|
||||
}
|
|
@ -21,8 +21,8 @@ package org.elasticsearch.client.transform;
|
|||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformStatsTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformStats;
|
||||
import org.elasticsearch.client.transform.transforms.TransformStatsTests;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
|
@ -32,25 +32,25 @@ import java.util.List;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class GetDataFrameTransformStatsResponseTests extends ESTestCase {
|
||||
public class GetTransformStatsResponseTests extends ESTestCase {
|
||||
|
||||
public void testXContentParser() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
GetDataFrameTransformStatsResponseTests::createTestInstance,
|
||||
GetDataFrameTransformStatsResponseTests::toXContent,
|
||||
GetDataFrameTransformStatsResponse::fromXContent)
|
||||
.assertEqualsConsumer(GetDataFrameTransformStatsResponseTests::assertEqualInstances)
|
||||
GetTransformStatsResponseTests::createTestInstance,
|
||||
GetTransformStatsResponseTests::toXContent,
|
||||
GetTransformStatsResponse::fromXContent)
|
||||
.assertEqualsConsumer(GetTransformStatsResponseTests::assertEqualInstances)
|
||||
.assertToXContentEquivalence(false)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(path -> path.isEmpty() == false)
|
||||
.test();
|
||||
}
|
||||
|
||||
private static GetDataFrameTransformStatsResponse createTestInstance() {
|
||||
private static GetTransformStatsResponse createTestInstance() {
|
||||
int count = randomIntBetween(1, 3);
|
||||
List<DataFrameTransformStats> stats = new ArrayList<>();
|
||||
List<TransformStats> stats = new ArrayList<>();
|
||||
for (int i=0; i<count; i++) {
|
||||
stats.add(DataFrameTransformStatsTests.randomInstance());
|
||||
stats.add(TransformStatsTests.randomInstance());
|
||||
}
|
||||
|
||||
List<TaskOperationFailure> taskFailures = null;
|
||||
|
@ -66,19 +66,19 @@ public class GetDataFrameTransformStatsResponseTests extends ESTestCase {
|
|||
nodeFailures = new ArrayList<>();
|
||||
int numNodeFailures = randomIntBetween(1, 4);
|
||||
for (int i=0; i<numNodeFailures; i++) {
|
||||
nodeFailures.add(new ElasticsearchException("GetDataFrameTransformStatsResponseTests"));
|
||||
nodeFailures.add(new ElasticsearchException("GetTransformStatsResponseTests"));
|
||||
}
|
||||
}
|
||||
|
||||
return new GetDataFrameTransformStatsResponse(stats, taskFailures, nodeFailures);
|
||||
return new GetTransformStatsResponse(stats, taskFailures, nodeFailures);
|
||||
}
|
||||
|
||||
private static void toXContent(GetDataFrameTransformStatsResponse response, XContentBuilder builder) throws IOException {
|
||||
private static void toXContent(GetTransformStatsResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startArray("transforms");
|
||||
for (DataFrameTransformStats stats : response.getTransformsStats()) {
|
||||
DataFrameTransformStatsTests.toXContent(stats, builder);
|
||||
for (TransformStats stats : response.getTransformsStats()) {
|
||||
TransformStatsTests.toXContent(stats, builder);
|
||||
}
|
||||
builder.endArray();
|
||||
|
||||
|
@ -90,8 +90,8 @@ public class GetDataFrameTransformStatsResponseTests extends ESTestCase {
|
|||
|
||||
// Serialisation of TaskOperationFailure and ElasticsearchException changes
|
||||
// the object so use a custom compare method rather than Object.equals
|
||||
private static void assertEqualInstances(GetDataFrameTransformStatsResponse expected,
|
||||
GetDataFrameTransformStatsResponse actual) {
|
||||
private static void assertEqualInstances(GetTransformStatsResponse expected,
|
||||
GetTransformStatsResponse actual) {
|
||||
assertEquals(expected.getTransformsStats(), actual.getTransformsStats());
|
||||
AcknowledgedTasksResponseTests.assertTaskOperationFailuresEqual(expected.getTaskFailures(), actual.getTaskFailures());
|
||||
AcknowledgedTasksResponseTests.assertNodeFailuresEqual(expected.getNodeFailures(), actual.getNodeFailures());
|
|
@ -20,8 +20,8 @@
|
|||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -37,15 +37,15 @@ import java.util.Optional;
|
|||
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestCase<PreviewDataFrameTransformRequest> {
|
||||
public class PreviewTransformRequestTests extends AbstractXContentTestCase<PreviewTransformRequest> {
|
||||
@Override
|
||||
protected PreviewDataFrameTransformRequest createTestInstance() {
|
||||
return new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
protected PreviewTransformRequest createTestInstance() {
|
||||
return new PreviewTransformRequest(TransformConfigTests.randomTransformConfig());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PreviewDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new PreviewDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser));
|
||||
protected PreviewTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new PreviewTransformRequest(TransformConfig.fromXContent(parser));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -57,27 +57,27 @@ public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestC
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
||||
|
||||
public void testValidate() {
|
||||
assertFalse(new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig())
|
||||
assertFalse(new PreviewTransformRequest(TransformConfigTests.randomTransformConfig())
|
||||
.validate().isPresent());
|
||||
assertThat(new PreviewDataFrameTransformRequest(null).validate().get().getMessage(),
|
||||
containsString("preview requires a non-null data frame config"));
|
||||
assertThat(new PreviewTransformRequest(null).validate().get().getMessage(),
|
||||
containsString("preview requires a non-null transform config"));
|
||||
|
||||
// null id and destination is valid
|
||||
DataFrameTransformConfig config = DataFrameTransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig());
|
||||
TransformConfig config = TransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig());
|
||||
|
||||
assertFalse(new PreviewDataFrameTransformRequest(config).validate().isPresent());
|
||||
assertFalse(new PreviewTransformRequest(config).validate().isPresent());
|
||||
|
||||
// null source is not valid
|
||||
config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
|
||||
config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
|
||||
|
||||
Optional<ValidationException> error = new PreviewDataFrameTransformRequest(config).validate();
|
||||
Optional<ValidationException> error = new PreviewTransformRequest(config).validate();
|
||||
assertTrue(error.isPresent());
|
||||
assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("transform source cannot be null"));
|
||||
}
|
||||
}
|
|
@ -31,19 +31,19 @@ import java.util.Map;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class PreviewDataFrameTransformResponseTests extends ESTestCase {
|
||||
public class PreviewTransformResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
this::createTestInstance,
|
||||
this::toXContent,
|
||||
PreviewDataFrameTransformResponse::fromXContent)
|
||||
PreviewTransformResponse::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(path -> path.isEmpty() == false)
|
||||
.test();
|
||||
}
|
||||
|
||||
private PreviewDataFrameTransformResponse createTestInstance() {
|
||||
private PreviewTransformResponse createTestInstance() {
|
||||
int numDocs = randomIntBetween(5, 10);
|
||||
List<Map<String, Object>> docs = new ArrayList<>(numDocs);
|
||||
for (int i=0; i<numDocs; i++) {
|
||||
|
@ -60,10 +60,10 @@ public class PreviewDataFrameTransformResponseTests extends ESTestCase {
|
|||
mappings.put(randomAlphaOfLength(10), Collections.singletonMap("type", randomAlphaOfLength(10)));
|
||||
}
|
||||
|
||||
return new PreviewDataFrameTransformResponse(docs, mappings);
|
||||
return new PreviewTransformResponse(docs, mappings);
|
||||
}
|
||||
|
||||
private void toXContent(PreviewDataFrameTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
private void toXContent(PreviewTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
builder.startArray("preview");
|
||||
for (Map<String, Object> doc : response.getDocs()) {
|
|
@ -20,8 +20,8 @@
|
|||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfig;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -36,32 +36,32 @@ import java.util.Optional;
|
|||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase<PutDataFrameTransformRequest> {
|
||||
public class PutTransformRequestTests extends AbstractXContentTestCase<PutTransformRequest> {
|
||||
|
||||
public void testValidate() {
|
||||
assertFalse(createTestInstance().validate().isPresent());
|
||||
|
||||
DataFrameTransformConfig config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
|
||||
TransformConfig config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
|
||||
|
||||
Optional<ValidationException> error = new PutDataFrameTransformRequest(config).validate();
|
||||
Optional<ValidationException> error = new PutTransformRequest(config).validate();
|
||||
assertTrue(error.isPresent());
|
||||
assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("data frame transform destination cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("transform id cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("transform source cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("transform destination cannot be null"));
|
||||
|
||||
error = new PutDataFrameTransformRequest(null).validate();
|
||||
error = new PutTransformRequest(null).validate();
|
||||
assertTrue(error.isPresent());
|
||||
assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config"));
|
||||
assertThat(error.get().getMessage(), containsString("put requires a non-null transform config"));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PutDataFrameTransformRequest createTestInstance() {
|
||||
return new PutDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
protected PutTransformRequest createTestInstance() {
|
||||
return new PutTransformRequest(TransformConfigTests.randomTransformConfig());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PutDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new PutDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser));
|
||||
protected PutTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new PutTransformRequest(TransformConfig.fromXContent(parser));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -73,7 +73,7 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase<
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
|
@ -28,14 +28,14 @@ import static org.hamcrest.Matchers.containsString;
|
|||
|
||||
public class StartDataFrameTransformRequestTests extends ESTestCase {
|
||||
public void testValidate_givenNullId() {
|
||||
StartDataFrameTransformRequest request = new StartDataFrameTransformRequest(null, null);
|
||||
StartTransformRequest request = new StartTransformRequest(null, null);
|
||||
Optional<ValidationException> validate = request.validate();
|
||||
assertTrue(validate.isPresent());
|
||||
assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null"));
|
||||
assertThat(validate.get().getMessage(), containsString("transform id must not be null"));
|
||||
}
|
||||
|
||||
public void testValidate_givenValid() {
|
||||
StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("foo", null);
|
||||
StartTransformRequest request = new StartTransformRequest("foo", null);
|
||||
Optional<ValidationException> validate = request.validate();
|
||||
assertFalse(validate.isPresent());
|
||||
}
|
||||
|
|
|
@ -26,16 +26,16 @@ import java.util.Optional;
|
|||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class StopDataFrameTransformRequestTests extends ESTestCase {
|
||||
public class StopTransformRequestTests extends ESTestCase {
|
||||
public void testValidate_givenNullId() {
|
||||
StopDataFrameTransformRequest request = new StopDataFrameTransformRequest(null);
|
||||
StopTransformRequest request = new StopTransformRequest(null);
|
||||
Optional<ValidationException> validate = request.validate();
|
||||
assertTrue(validate.isPresent());
|
||||
assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null"));
|
||||
assertThat(validate.get().getMessage(), containsString("transform id must not be null"));
|
||||
}
|
||||
|
||||
public void testValidate_givenValid() {
|
||||
StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("foo");
|
||||
StopTransformRequest request = new StopTransformRequest("foo");
|
||||
Optional<ValidationException> validate = request.validate();
|
||||
assertFalse(validate.isPresent());
|
||||
}
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -38,17 +38,17 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase {
|
|||
xContentTester(this::createParser,
|
||||
UpdateDataFrameTransformResponseTests::createTestInstance,
|
||||
UpdateDataFrameTransformResponseTests::toXContent,
|
||||
UpdateDataFrameTransformResponse::fromXContent)
|
||||
UpdateTransformResponse::fromXContent)
|
||||
.assertToXContentEquivalence(false)
|
||||
.supportsUnknownFields(false)
|
||||
.test();
|
||||
}
|
||||
|
||||
private static UpdateDataFrameTransformResponse createTestInstance() {
|
||||
return new UpdateDataFrameTransformResponse(DataFrameTransformConfigTests.randomDataFrameTransformConfig());
|
||||
private static UpdateTransformResponse createTestInstance() {
|
||||
return new UpdateTransformResponse(TransformConfigTests.randomTransformConfig());
|
||||
}
|
||||
|
||||
private static void toXContent(UpdateDataFrameTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
private static void toXContent(UpdateTransformResponse response, XContentBuilder builder) throws IOException {
|
||||
response.getTransformConfiguration().toXContent(builder, null);
|
||||
}
|
||||
|
||||
|
@ -56,7 +56,7 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase {
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.client.transform;
|
||||
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
|
||||
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -32,34 +32,34 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate;
|
||||
import static org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests.randomTransformConfigUpdate;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCase<UpdateDataFrameTransformRequest> {
|
||||
public class UpdateTransformRequestTests extends AbstractXContentTestCase<UpdateTransformRequest> {
|
||||
|
||||
public void testValidate() {
|
||||
assertFalse(createTestInstance().validate().isPresent());
|
||||
|
||||
DataFrameTransformConfigUpdate config = randomDataFrameTransformConfigUpdate();
|
||||
TransformConfigUpdate config = randomTransformConfigUpdate();
|
||||
|
||||
Optional<ValidationException> error = new UpdateDataFrameTransformRequest(config, null).validate();
|
||||
Optional<ValidationException> error = new UpdateTransformRequest(config, null).validate();
|
||||
assertTrue(error.isPresent());
|
||||
assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null"));
|
||||
assertThat(error.get().getMessage(), containsString("transform id cannot be null"));
|
||||
|
||||
error = new UpdateDataFrameTransformRequest(null, "123").validate();
|
||||
error = new UpdateTransformRequest(null, "123").validate();
|
||||
assertTrue(error.isPresent());
|
||||
assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config"));
|
||||
assertThat(error.get().getMessage(), containsString("put requires a non-null transform config"));
|
||||
}
|
||||
|
||||
private final String transformId = randomAlphaOfLength(10);
|
||||
@Override
|
||||
protected UpdateDataFrameTransformRequest createTestInstance() {
|
||||
return new UpdateDataFrameTransformRequest(randomDataFrameTransformConfigUpdate(), transformId);
|
||||
protected UpdateTransformRequest createTestInstance() {
|
||||
return new UpdateTransformRequest(randomTransformConfigUpdate(), transformId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UpdateDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate.fromXContent(parser), transformId);
|
||||
protected UpdateTransformRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return new UpdateTransformRequest(TransformConfigUpdate.fromXContent(parser), transformId);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -71,7 +71,7 @@ public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCa
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
|
@ -26,38 +26,38 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameTransformCheckpointStatsTests extends ESTestCase {
|
||||
public class TransformCheckpointStatsTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
DataFrameTransformCheckpointStatsTests::randomDataFrameTransformCheckpointStats,
|
||||
DataFrameTransformCheckpointStatsTests::toXContent,
|
||||
DataFrameTransformCheckpointStats::fromXContent)
|
||||
TransformCheckpointStatsTests::randomTransformCheckpointStats,
|
||||
TransformCheckpointStatsTests::toXContent,
|
||||
TransformCheckpointStats::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(field -> field.startsWith("position"))
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() {
|
||||
return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000),
|
||||
randomBoolean() ? null : DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
|
||||
randomBoolean() ? null : DataFrameTransformProgressTests.randomInstance(),
|
||||
public static TransformCheckpointStats randomTransformCheckpointStats() {
|
||||
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
|
||||
randomBoolean() ? null : TransformIndexerPositionTests.randomTransformIndexerPosition(),
|
||||
randomBoolean() ? null : TransformProgressTests.randomInstance(),
|
||||
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameTransformCheckpointStats stats, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformCheckpointStats stats, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(DataFrameTransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint());
|
||||
builder.field(TransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint());
|
||||
if (stats.getPosition() != null) {
|
||||
builder.field(DataFrameTransformCheckpointStats.POSITION.getPreferredName());
|
||||
DataFrameIndexerPositionTests.toXContent(stats.getPosition(), builder);
|
||||
builder.field(TransformCheckpointStats.POSITION.getPreferredName());
|
||||
TransformIndexerPositionTests.toXContent(stats.getPosition(), builder);
|
||||
}
|
||||
if (stats.getCheckpointProgress() != null) {
|
||||
builder.field(DataFrameTransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName());
|
||||
DataFrameTransformProgressTests.toXContent(stats.getCheckpointProgress(), builder);
|
||||
builder.field(TransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName());
|
||||
TransformProgressTests.toXContent(stats.getCheckpointProgress(), builder);
|
||||
}
|
||||
builder.field(DataFrameTransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis());
|
||||
builder.field(DataFrameTransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis());
|
||||
builder.field(TransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis());
|
||||
builder.field(TransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis());
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
|
@ -27,38 +27,38 @@ import java.time.Instant;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameTransformCheckpointingInfoTests extends ESTestCase {
|
||||
public class TransformCheckpointingInfoTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
DataFrameTransformCheckpointingInfoTests::randomDataFrameTransformCheckpointingInfo,
|
||||
DataFrameTransformCheckpointingInfoTests::toXContent,
|
||||
DataFrameTransformCheckpointingInfo::fromXContent)
|
||||
TransformCheckpointingInfoTests::randomTransformCheckpointingInfo,
|
||||
TransformCheckpointingInfoTests::toXContent,
|
||||
TransformCheckpointingInfo::fromXContent)
|
||||
.supportsUnknownFields(false)
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
|
||||
return new DataFrameTransformCheckpointingInfo(
|
||||
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
|
||||
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
|
||||
public static TransformCheckpointingInfo randomTransformCheckpointingInfo() {
|
||||
return new TransformCheckpointingInfo(
|
||||
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
|
||||
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
|
||||
randomLongBetween(0, 10000),
|
||||
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameTransformCheckpointingInfo info, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformCheckpointingInfo info, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
if (info.getLast().getTimestampMillis() > 0) {
|
||||
builder.field(DataFrameTransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName());
|
||||
DataFrameTransformCheckpointStatsTests.toXContent(info.getLast(), builder);
|
||||
builder.field(TransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName());
|
||||
TransformCheckpointStatsTests.toXContent(info.getLast(), builder);
|
||||
}
|
||||
if (info.getNext().getTimestampMillis() > 0) {
|
||||
builder.field(DataFrameTransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName());
|
||||
DataFrameTransformCheckpointStatsTests.toXContent(info.getNext(), builder);
|
||||
builder.field(TransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName());
|
||||
TransformCheckpointStatsTests.toXContent(info.getNext(), builder);
|
||||
}
|
||||
builder.field(DataFrameTransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind());
|
||||
builder.field(TransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind());
|
||||
if (info.getChangesLastDetectedAt() != null) {
|
||||
builder.field(DataFrameTransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt());
|
||||
builder.field(TransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.client.transform.transforms;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.client.transform.DataFrameNamedXContentProvider;
|
||||
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
|
||||
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
@ -38,10 +38,10 @@ import java.util.function.Predicate;
|
|||
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
|
||||
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
|
||||
|
||||
public class DataFrameTransformConfigTests extends AbstractXContentTestCase<DataFrameTransformConfig> {
|
||||
public class TransformConfigTests extends AbstractXContentTestCase<TransformConfig> {
|
||||
|
||||
public static DataFrameTransformConfig randomDataFrameTransformConfig() {
|
||||
return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10),
|
||||
public static TransformConfig randomTransformConfig() {
|
||||
return new TransformConfig(randomAlphaOfLengthBetween(1, 10),
|
||||
randomSourceConfig(),
|
||||
randomDestConfig(),
|
||||
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1000, 1000000)),
|
||||
|
@ -57,13 +57,13 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase<Data
|
|||
}
|
||||
|
||||
@Override
|
||||
protected DataFrameTransformConfig createTestInstance() {
|
||||
return randomDataFrameTransformConfig();
|
||||
protected TransformConfig createTestInstance() {
|
||||
return randomTransformConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DataFrameTransformConfig doParseInstance(XContentParser parser) throws IOException {
|
||||
return DataFrameTransformConfig.fromXContent(parser);
|
||||
protected TransformConfig doParseInstance(XContentParser parser) throws IOException {
|
||||
return TransformConfig.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -81,7 +81,7 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase<Data
|
|||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.client.transform.transforms;
|
||||
|
||||
import org.elasticsearch.client.transform.DataFrameNamedXContentProvider;
|
||||
import org.elasticsearch.client.transform.TransformNamedXContentProvider;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -34,10 +34,10 @@ import java.util.List;
|
|||
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
|
||||
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
|
||||
|
||||
public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCase<DataFrameTransformConfigUpdate> {
|
||||
public class TransformConfigUpdateTests extends AbstractXContentTestCase<TransformConfigUpdate> {
|
||||
|
||||
public static DataFrameTransformConfigUpdate randomDataFrameTransformConfigUpdate() {
|
||||
return new DataFrameTransformConfigUpdate(
|
||||
public static TransformConfigUpdate randomTransformConfigUpdate() {
|
||||
return new TransformConfigUpdate(
|
||||
randomBoolean() ? null : randomSourceConfig(),
|
||||
randomBoolean() ? null : randomDestConfig(),
|
||||
randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)),
|
||||
|
@ -50,8 +50,8 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas
|
|||
}
|
||||
|
||||
@Override
|
||||
protected DataFrameTransformConfigUpdate doParseInstance(XContentParser parser) throws IOException {
|
||||
return DataFrameTransformConfigUpdate.fromXContent(parser);
|
||||
protected TransformConfigUpdate doParseInstance(XContentParser parser) throws IOException {
|
||||
return TransformConfigUpdate.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -60,15 +60,15 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas
|
|||
}
|
||||
|
||||
@Override
|
||||
protected DataFrameTransformConfigUpdate createTestInstance() {
|
||||
return randomDataFrameTransformConfigUpdate();
|
||||
protected TransformConfigUpdate createTestInstance() {
|
||||
return randomTransformConfigUpdate();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NamedXContentRegistry xContentRegistry() {
|
||||
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
|
||||
List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents();
|
||||
namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers());
|
||||
namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers());
|
||||
|
||||
return new NamedXContentRegistry(namedXContents);
|
||||
}
|
|
@ -28,24 +28,24 @@ import java.util.Map;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameIndexerPositionTests extends ESTestCase {
|
||||
public class TransformIndexerPositionTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
DataFrameIndexerPositionTests::randomDataFrameIndexerPosition,
|
||||
DataFrameIndexerPositionTests::toXContent,
|
||||
DataFrameIndexerPosition::fromXContent)
|
||||
TransformIndexerPositionTests::randomTransformIndexerPosition,
|
||||
TransformIndexerPositionTests::toXContent,
|
||||
TransformIndexerPosition::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(field -> field.equals("indexer_position") ||
|
||||
field.equals("bucket_position"))
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameIndexerPosition randomDataFrameIndexerPosition() {
|
||||
return new DataFrameIndexerPosition(randomPositionMap(), randomPositionMap());
|
||||
public static TransformIndexerPosition randomTransformIndexerPosition() {
|
||||
return new TransformIndexerPosition(randomPositionMap(), randomPositionMap());
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameIndexerPosition position, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformIndexerPosition position, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
if (position.getIndexerPosition() != null) {
|
||||
builder.field("indexer_position", position.getIndexerPosition());
|
|
@ -27,20 +27,20 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameIndexerTransformStatsTests extends ESTestCase {
|
||||
public class TransformIndexerStatsTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(
|
||||
this::createParser,
|
||||
DataFrameIndexerTransformStatsTests::randomStats,
|
||||
DataFrameIndexerTransformStatsTests::toXContent,
|
||||
DataFrameIndexerTransformStats::fromXContent)
|
||||
TransformIndexerStatsTests::randomStats,
|
||||
TransformIndexerStatsTests::toXContent,
|
||||
TransformIndexerStats::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameIndexerTransformStats randomStats() {
|
||||
return new DataFrameIndexerTransformStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
public static TransformIndexerStats randomStats() {
|
||||
return new TransformIndexerStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
|
||||
randomBoolean() ? null : randomDouble(),
|
||||
|
@ -48,7 +48,7 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase {
|
|||
randomBoolean() ? null : randomDouble());
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameIndexerTransformStats stats, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformIndexerStats stats, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages());
|
||||
builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
|
||||
|
@ -60,11 +60,11 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase {
|
|||
builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime());
|
||||
builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal());
|
||||
builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures());
|
||||
builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
|
||||
builder.field(TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(),
|
||||
stats.getExpAvgCheckpointDurationMs());
|
||||
builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(),
|
||||
builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(),
|
||||
stats.getExpAvgDocumentsIndexed());
|
||||
builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
|
||||
builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(),
|
||||
stats.getExpAvgDocumentsProcessed());
|
||||
builder.endObject();
|
||||
}
|
|
@ -26,19 +26,19 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameTransformProgressTests extends ESTestCase {
|
||||
public class TransformProgressTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
DataFrameTransformProgressTests::randomInstance,
|
||||
DataFrameTransformProgressTests::toXContent,
|
||||
DataFrameTransformProgress::fromXContent)
|
||||
TransformProgressTests::randomInstance,
|
||||
TransformProgressTests::toXContent,
|
||||
TransformProgress::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameTransformProgress randomInstance() {
|
||||
return new DataFrameTransformProgress(
|
||||
public static TransformProgress randomInstance() {
|
||||
return new TransformProgress(
|
||||
randomBoolean() ? null : randomNonNegativeLong(),
|
||||
randomBoolean() ? null : randomNonNegativeLong(),
|
||||
randomBoolean() ? null : randomDouble(),
|
||||
|
@ -46,19 +46,19 @@ public class DataFrameTransformProgressTests extends ESTestCase {
|
|||
randomBoolean() ? null : randomNonNegativeLong());
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameTransformProgress progress, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformProgress progress, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
if (progress.getTotalDocs() != null) {
|
||||
builder.field(DataFrameTransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs());
|
||||
builder.field(TransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs());
|
||||
}
|
||||
if (progress.getPercentComplete() != null) {
|
||||
builder.field(DataFrameTransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete());
|
||||
builder.field(TransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete());
|
||||
}
|
||||
if (progress.getRemainingDocs() != null) {
|
||||
builder.field(DataFrameTransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs());
|
||||
builder.field(TransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs());
|
||||
}
|
||||
builder.field(DataFrameTransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed());
|
||||
builder.field(DataFrameTransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed());
|
||||
builder.field(TransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed());
|
||||
builder.field(TransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed());
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
|
@ -27,46 +27,46 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||
|
||||
public class DataFrameTransformStatsTests extends ESTestCase {
|
||||
public class TransformStatsTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
xContentTester(this::createParser,
|
||||
DataFrameTransformStatsTests::randomInstance,
|
||||
DataFrameTransformStatsTests::toXContent,
|
||||
DataFrameTransformStats::fromXContent)
|
||||
TransformStatsTests::randomInstance,
|
||||
TransformStatsTests::toXContent,
|
||||
TransformStats::fromXContent)
|
||||
.supportsUnknownFields(true)
|
||||
.randomFieldsExcludeFilter(field -> field.equals("node.attributes") || field.contains("position"))
|
||||
.test();
|
||||
}
|
||||
|
||||
public static DataFrameTransformStats randomInstance() {
|
||||
return new DataFrameTransformStats(randomAlphaOfLength(10),
|
||||
randomBoolean() ? null : randomFrom(DataFrameTransformStats.State.values()),
|
||||
public static TransformStats randomInstance() {
|
||||
return new TransformStats(randomAlphaOfLength(10),
|
||||
randomBoolean() ? null : randomFrom(TransformStats.State.values()),
|
||||
randomBoolean() ? null : randomAlphaOfLength(100),
|
||||
randomBoolean() ? null : NodeAttributesTests.createRandom(),
|
||||
DataFrameIndexerTransformStatsTests.randomStats(),
|
||||
randomBoolean() ? null : DataFrameTransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo());
|
||||
TransformIndexerStatsTests.randomStats(),
|
||||
randomBoolean() ? null : TransformCheckpointingInfoTests.randomTransformCheckpointingInfo());
|
||||
}
|
||||
|
||||
public static void toXContent(DataFrameTransformStats stats, XContentBuilder builder) throws IOException {
|
||||
public static void toXContent(TransformStats stats, XContentBuilder builder) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(DataFrameTransformStats.ID.getPreferredName(), stats.getId());
|
||||
builder.field(TransformStats.ID.getPreferredName(), stats.getId());
|
||||
if (stats.getState() != null) {
|
||||
builder.field(DataFrameTransformStats.STATE_FIELD.getPreferredName(),
|
||||
builder.field(TransformStats.STATE_FIELD.getPreferredName(),
|
||||
stats.getState().value());
|
||||
}
|
||||
if (stats.getReason() != null) {
|
||||
builder.field(DataFrameTransformStats.REASON_FIELD.getPreferredName(), stats.getReason());
|
||||
builder.field(TransformStats.REASON_FIELD.getPreferredName(), stats.getReason());
|
||||
}
|
||||
if (stats.getNode() != null) {
|
||||
builder.field(DataFrameTransformStats.NODE_FIELD.getPreferredName());
|
||||
builder.field(TransformStats.NODE_FIELD.getPreferredName());
|
||||
stats.getNode().toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
}
|
||||
builder.field(DataFrameTransformStats.STATS_FIELD.getPreferredName());
|
||||
DataFrameIndexerTransformStatsTests.toXContent(stats.getIndexerStats(), builder);
|
||||
builder.field(TransformStats.STATS_FIELD.getPreferredName());
|
||||
TransformIndexerStatsTests.toXContent(stats.getIndexerStats(), builder);
|
||||
if (stats.getCheckpointingInfo() != null) {
|
||||
builder.field(DataFrameTransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName());
|
||||
DataFrameTransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder);
|
||||
builder.field(TransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName());
|
||||
TransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
|
@ -28,40 +28,40 @@ import java.io.IOException;
|
|||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class DataFrameTransformCheckpointStatsTests extends AbstractResponseTestCase<
|
||||
public class TransformCheckpointStatsTests extends AbstractResponseTestCase<
|
||||
TransformCheckpointStats,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats> {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointStats> {
|
||||
|
||||
public static TransformCheckpointStats fromHlrc(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointStats instance) {
|
||||
return new TransformCheckpointStats(instance.getCheckpoint(),
|
||||
DataFrameIndexerPositionTests.fromHlrc(instance.getPosition()),
|
||||
DataFrameTransformProgressTests.fromHlrc(instance.getCheckpointProgress()),
|
||||
TransformIndexerPositionTests.fromHlrc(instance.getPosition()),
|
||||
TransformProgressTests.fromHlrc(instance.getCheckpointProgress()),
|
||||
instance.getTimestampMillis(),
|
||||
instance.getTimeUpperBoundMillis());
|
||||
}
|
||||
|
||||
public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() {
|
||||
public static TransformCheckpointStats randomTransformCheckpointStats() {
|
||||
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
|
||||
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
|
||||
randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(),
|
||||
TransformIndexerPositionTests.randomTransformIndexerPosition(),
|
||||
randomBoolean() ? null : TransformProgressTests.randomTransformProgress(),
|
||||
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TransformCheckpointStats createServerTestInstance(XContentType xContentType) {
|
||||
return randomDataFrameTransformCheckpointStats();
|
||||
return randomTransformCheckpointStats();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats doParseToClientInstance(XContentParser parser)
|
||||
protected org.elasticsearch.client.transform.transforms.TransformCheckpointStats doParseToClientInstance(XContentParser parser)
|
||||
throws IOException {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats.fromXContent(parser);
|
||||
return org.elasticsearch.client.transform.transforms.TransformCheckpointStats.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertInstances(TransformCheckpointStats serverTestInstance,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats clientInstance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointStats clientInstance) {
|
||||
assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint()));
|
||||
assertThat(serverTestInstance.getPosition().getBucketsPosition(), equalTo(clientInstance.getPosition().getBucketsPosition()));
|
||||
assertThat(serverTestInstance.getPosition().getIndexerPosition(), equalTo(clientInstance.getPosition().getIndexerPosition()));
|
|
@ -27,41 +27,41 @@ import java.io.IOException;
|
|||
import java.time.Instant;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase<
|
||||
public class TransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase<
|
||||
TransformCheckpointingInfo,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo> {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo> {
|
||||
|
||||
public static TransformCheckpointingInfo fromHlrc(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) {
|
||||
return new TransformCheckpointingInfo(
|
||||
DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getLast()),
|
||||
DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getNext()),
|
||||
TransformCheckpointStatsTests.fromHlrc(instance.getLast()),
|
||||
TransformCheckpointStatsTests.fromHlrc(instance.getNext()),
|
||||
instance.getOperationsBehind(),
|
||||
instance.getChangesLastDetectedAt());
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo.fromXContent(parser);
|
||||
public org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TransformCheckpointingInfo convertHlrcToInternal(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) {
|
||||
return fromHlrc(instance);
|
||||
}
|
||||
|
||||
public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
|
||||
public static TransformCheckpointingInfo randomTransformCheckpointingInfo() {
|
||||
return new TransformCheckpointingInfo(
|
||||
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
|
||||
DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(),
|
||||
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
|
||||
TransformCheckpointStatsTests.randomTransformCheckpointStats(),
|
||||
randomNonNegativeLong(),
|
||||
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TransformCheckpointingInfo createTestInstance() {
|
||||
return randomDataFrameTransformCheckpointingInfo();
|
||||
return randomTransformCheckpointingInfo();
|
||||
}
|
||||
|
||||
@Override
|
|
@ -29,35 +29,35 @@ import java.util.Map;
|
|||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class DataFrameIndexerPositionTests extends AbstractResponseTestCase<
|
||||
public class TransformIndexerPositionTests extends AbstractResponseTestCase<
|
||||
TransformIndexerPosition,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition> {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerPosition> {
|
||||
|
||||
public static TransformIndexerPosition fromHlrc(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerPosition instance) {
|
||||
if (instance == null) {
|
||||
return null;
|
||||
}
|
||||
return new TransformIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition());
|
||||
}
|
||||
|
||||
public static TransformIndexerPosition randomDataFrameIndexerPosition() {
|
||||
public static TransformIndexerPosition randomTransformIndexerPosition() {
|
||||
return new TransformIndexerPosition(randomPositionMap(), randomPositionMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TransformIndexerPosition createServerTestInstance(XContentType xContentType) {
|
||||
return randomDataFrameIndexerPosition();
|
||||
return randomTransformIndexerPosition();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition doParseToClientInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition.fromXContent(parser);
|
||||
protected org.elasticsearch.client.transform.transforms.TransformIndexerPosition doParseToClientInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.TransformIndexerPosition.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertInstances(TransformIndexerPosition serverTestInstance,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition clientInstance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerPosition clientInstance) {
|
||||
assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition()));
|
||||
assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition()));
|
||||
}
|
|
@ -25,12 +25,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTestCase<
|
||||
public class TransformIndexerStatsTests extends AbstractHlrcXContentTestCase<
|
||||
TransformIndexerStats,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats> {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerStats> {
|
||||
|
||||
public static TransformIndexerStats fromHlrc(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) {
|
||||
return new TransformIndexerStats(
|
||||
instance.getNumPages(),
|
||||
instance.getNumDocuments(),
|
||||
|
@ -48,14 +48,14 @@ public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTes
|
|||
}
|
||||
|
||||
@Override
|
||||
public org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats doHlrcParseInstance(XContentParser parser)
|
||||
public org.elasticsearch.client.transform.transforms.TransformIndexerStats doHlrcParseInstance(XContentParser parser)
|
||||
throws IOException {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats.fromXContent(parser);
|
||||
return org.elasticsearch.client.transform.transforms.TransformIndexerStats.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TransformIndexerStats convertHlrcToInternal(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) {
|
||||
return fromHlrc(instance);
|
||||
}
|
||||
|
|
@ -26,12 +26,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
|
|||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
|
||||
public class TransformProgressTests extends AbstractResponseTestCase<
|
||||
TransformProgress,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress> {
|
||||
org.elasticsearch.client.transform.transforms.TransformProgress> {
|
||||
|
||||
public static TransformProgress fromHlrc(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformProgress instance) {
|
||||
if (instance == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -41,7 +41,7 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
|
|||
instance.getDocumentsIndexed());
|
||||
}
|
||||
|
||||
public static TransformProgress randomDataFrameTransformProgress() {
|
||||
public static TransformProgress randomTransformProgress() {
|
||||
Long totalDocs = randomBoolean() ? null : randomNonNegativeLong();
|
||||
Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
|
||||
return new TransformProgress(
|
||||
|
@ -53,17 +53,17 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase<
|
|||
|
||||
@Override
|
||||
protected TransformProgress createServerTestInstance(XContentType xContentType) {
|
||||
return randomDataFrameTransformProgress();
|
||||
return randomTransformProgress();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected org.elasticsearch.client.transform.transforms.DataFrameTransformProgress doParseToClientInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameTransformProgress.fromXContent(parser);
|
||||
protected org.elasticsearch.client.transform.transforms.TransformProgress doParseToClientInstance(XContentParser parser) {
|
||||
return org.elasticsearch.client.transform.transforms.TransformProgress.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assertInstances(TransformProgress serverTestInstance,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress clientInstance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformProgress clientInstance) {
|
||||
assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs()));
|
||||
assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed()));
|
||||
assertThat(serverTestInstance.getPercentComplete(), equalTo(clientInstance.getPercentComplete()));
|
|
@ -34,8 +34,8 @@ import java.util.HashMap;
|
|||
import java.util.Map;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<TransformStats,
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformStats> {
|
||||
public class TransformStatsTests extends AbstractHlrcXContentTestCase<TransformStats,
|
||||
org.elasticsearch.client.transform.transforms.TransformStats> {
|
||||
|
||||
public static NodeAttributes fromHlrc(org.elasticsearch.client.transform.transforms.NodeAttributes attributes) {
|
||||
return attributes == null ? null : new NodeAttributes(attributes.getId(),
|
||||
|
@ -46,45 +46,45 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<T
|
|||
}
|
||||
|
||||
public static TransformStats
|
||||
fromHlrc(org.elasticsearch.client.transform.transforms.DataFrameTransformStats instance) {
|
||||
fromHlrc(org.elasticsearch.client.transform.transforms.TransformStats instance) {
|
||||
|
||||
return new TransformStats(instance.getId(),
|
||||
TransformStats.State.fromString(instance.getState().value()),
|
||||
instance.getReason(),
|
||||
fromHlrc(instance.getNode()),
|
||||
DataFrameIndexerTransformStatsTests.fromHlrc(instance.getIndexerStats()),
|
||||
DataFrameTransformCheckpointingInfoTests.fromHlrc(instance.getCheckpointingInfo()));
|
||||
TransformIndexerStatsTests.fromHlrc(instance.getIndexerStats()),
|
||||
TransformCheckpointingInfoTests.fromHlrc(instance.getCheckpointingInfo()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.elasticsearch.client.transform.transforms.DataFrameTransformStats doHlrcParseInstance(XContentParser parser)
|
||||
public org.elasticsearch.client.transform.transforms.TransformStats doHlrcParseInstance(XContentParser parser)
|
||||
throws IOException {
|
||||
return org.elasticsearch.client.transform.transforms.DataFrameTransformStats.fromXContent(parser);
|
||||
return org.elasticsearch.client.transform.transforms.TransformStats.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TransformStats convertHlrcToInternal(
|
||||
org.elasticsearch.client.transform.transforms.DataFrameTransformStats instance) {
|
||||
org.elasticsearch.client.transform.transforms.TransformStats instance) {
|
||||
return new TransformStats(instance.getId(),
|
||||
TransformStats.State.fromString(instance.getState().value()),
|
||||
instance.getReason(),
|
||||
fromHlrc(instance.getNode()),
|
||||
DataFrameIndexerTransformStatsTests.fromHlrc(instance.getIndexerStats()),
|
||||
DataFrameTransformCheckpointingInfoTests.fromHlrc(instance.getCheckpointingInfo()));
|
||||
TransformIndexerStatsTests.fromHlrc(instance.getIndexerStats()),
|
||||
TransformCheckpointingInfoTests.fromHlrc(instance.getCheckpointingInfo()));
|
||||
}
|
||||
|
||||
public static TransformStats randomDataFrameTransformStats() {
|
||||
public static TransformStats randomTransformStats() {
|
||||
return new TransformStats(randomAlphaOfLength(10),
|
||||
randomFrom(TransformStats.State.values()),
|
||||
randomBoolean() ? null : randomAlphaOfLength(100),
|
||||
randomBoolean() ? null : randomNodeAttributes(),
|
||||
randomStats(),
|
||||
DataFrameTransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo());
|
||||
TransformCheckpointingInfoTests.randomTransformCheckpointingInfo());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TransformStats createTestInstance() {
|
||||
return randomDataFrameTransformStats();
|
||||
return randomTransformStats();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -97,7 +97,7 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<T
|
|||
return field -> field.contains("position") || field.equals("node.attributes");
|
||||
}
|
||||
|
||||
public static TransformProgress randomDataFrameTransformProgress() {
|
||||
public static TransformProgress randomTransformProgress() {
|
||||
Long totalDocs = randomBoolean() ? null : randomNonNegativeLong();
|
||||
Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null;
|
||||
return new TransformProgress(
|
||||
|
@ -107,16 +107,16 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase<T
|
|||
randomBoolean() ? null : randomNonNegativeLong());
|
||||
}
|
||||
|
||||
public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() {
|
||||
return new TransformCheckpointingInfo(randomDataFrameTransformCheckpointStats(),
|
||||
randomDataFrameTransformCheckpointStats(), randomNonNegativeLong(),
|
||||
public static TransformCheckpointingInfo randomTransformCheckpointingInfo() {
|
||||
return new TransformCheckpointingInfo(randomTransformCheckpointStats(),
|
||||
randomTransformCheckpointStats(), randomNonNegativeLong(),
|
||||
randomBoolean() ? null : Instant.ofEpochMilli(randomNonNegativeLong()));
|
||||
}
|
||||
|
||||
public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() {
|
||||
public static TransformCheckpointStats randomTransformCheckpointStats() {
|
||||
return new TransformCheckpointStats(randomLongBetween(1, 1_000_000),
|
||||
DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(),
|
||||
randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(),
|
||||
TransformIndexerPositionTests.randomTransformIndexerPosition(),
|
||||
randomBoolean() ? null : TransformProgressTests.randomTransformProgress(),
|
||||
randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000));
|
||||
}
|
||||
|
|
@@ -395,6 +395,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
}
return copySpec {
from project.jdks."bundled_${platform}"
exclude "demo/**"
eachFile { FileCopyDetails details ->
if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
details.mode = 0755
@@ -6,6 +6,8 @@ import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'

testFixtures.useFixture()

configurations {
dockerPlugins
dockerSource
@ -407,163 +407,168 @@ subprojects {
|
|||
'default' buildDist
|
||||
}
|
||||
|
||||
// sanity checks if packages can be extracted
|
||||
final File extractionDir = new File(buildDir, 'extracted')
|
||||
File packageExtractionDir
|
||||
if (project.name.contains('deb')) {
|
||||
packageExtractionDir = new File(extractionDir, 'deb-extracted')
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
packageExtractionDir = new File(extractionDir, 'rpm-extracted')
|
||||
}
|
||||
task checkExtraction(type: LoggedExec) {
|
||||
dependsOn buildDist
|
||||
doFirst {
|
||||
project.delete(extractionDir)
|
||||
extractionDir.mkdirs()
|
||||
}
|
||||
}
|
||||
check.dependsOn checkExtraction
|
||||
if (project.name.contains('deb')) {
|
||||
checkExtraction {
|
||||
onlyIf dpkgExists
|
||||
commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir
|
||||
}
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkExtraction {
|
||||
onlyIf rpmExists
|
||||
final File rpmDatabase = new File(extractionDir, 'rpm-database')
|
||||
commandLine 'rpm',
|
||||
'--badreloc',
|
||||
'--nodeps',
|
||||
'--noscripts',
|
||||
'--notriggers',
|
||||
'--dbpath',
|
||||
rpmDatabase,
|
||||
'--relocate',
|
||||
"/=${packageExtractionDir}",
|
||||
'-i',
|
||||
"${-> buildDist.outputs.files.singleFile}"
|
||||
}
|
||||
}
|
||||
if (dpkgExists() || rpmExists()) {
|
||||
|
||||
task checkLicense {
|
||||
dependsOn buildDist, checkExtraction
|
||||
}
|
||||
check.dependsOn checkLicense
|
||||
if (project.name.contains('deb')) {
|
||||
checkLicense {
|
||||
onlyIf dpkgExists
|
||||
doLast {
|
||||
Path copyrightPath
|
||||
String expectedLicense
|
||||
String licenseFilename
|
||||
if (project.name.contains('oss-')) {
|
||||
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright")
|
||||
expectedLicense = "ASL-2.0"
|
||||
licenseFilename = "APACHE-LICENSE-2.0.txt"
|
||||
} else {
|
||||
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright")
|
||||
expectedLicense = "Elastic-License"
|
||||
licenseFilename = "ELASTIC-LICENSE.txt"
|
||||
}
|
||||
final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",
|
||||
"Copyright: Elasticsearch B.V. <info@elastic.co>",
|
||||
"License: " + expectedLicense)
|
||||
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
|
||||
final List<String> expectedLines = header + licenseLines.collect { " " + it }
|
||||
assertLinesInFile(copyrightPath, expectedLines)
|
||||
// sanity checks if packages can be extracted
|
||||
final File extractionDir = new File(buildDir, 'extracted')
|
||||
File packageExtractionDir
|
||||
if (project.name.contains('deb')) {
|
||||
packageExtractionDir = new File(extractionDir, 'deb-extracted')
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
packageExtractionDir = new File(extractionDir, 'rpm-extracted')
|
||||
}
|
||||
task checkExtraction(type: LoggedExec) {
|
||||
dependsOn buildDist
|
||||
doFirst {
|
||||
project.delete(extractionDir)
|
||||
extractionDir.mkdirs()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkLicense {
|
||||
onlyIf rpmExists
|
||||
doLast {
|
||||
String licenseFilename
|
||||
if (project.name.contains('oss-')) {
|
||||
licenseFilename = "APACHE-LICENSE-2.0.txt"
|
||||
} else {
|
||||
licenseFilename = "ELASTIC-LICENSE.txt"
|
||||
}
|
||||
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
|
||||
final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt")
|
||||
assertLinesInFile(licensePath, licenseLines)
|
||||
|
||||
check.dependsOn checkExtraction
|
||||
if (project.name.contains('deb')) {
|
||||
checkExtraction {
|
||||
onlyIf dpkgExists
|
||||
commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir
|
||||
}
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkExtraction {
|
||||
onlyIf rpmExists
|
||||
final File rpmDatabase = new File(extractionDir, 'rpm-database')
|
||||
commandLine 'rpm',
|
||||
'--badreloc',
|
||||
'--nodeps',
|
||||
'--noscripts',
|
||||
'--notriggers',
|
||||
'--dbpath',
|
||||
rpmDatabase,
|
||||
'--relocate',
|
||||
"/=${packageExtractionDir}",
|
||||
'-i',
|
||||
"${-> buildDist.outputs.files.singleFile}"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task checkNotice {
|
||||
dependsOn buildDist, checkExtraction
|
||||
onlyIf { (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) }
|
||||
doLast {
|
||||
final List<String> noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch")
|
||||
final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt")
|
||||
assertLinesInFile(noticePath, noticeLines)
|
||||
task checkLicense {
|
||||
dependsOn buildDist, checkExtraction
|
||||
}
|
||||
}
|
||||
check.dependsOn checkNotice
|
||||
|
||||
task checkLicenseMetadata(type: LoggedExec) {
|
||||
dependsOn buildDist, checkExtraction
|
||||
}
|
||||
check.dependsOn checkLicenseMetadata
|
||||
if (project.name.contains('deb')) {
|
||||
checkLicenseMetadata { LoggedExec exec ->
|
||||
onlyIf dpkgExists
|
||||
final ByteArrayOutputStream output = new ByteArrayOutputStream()
|
||||
exec.commandLine 'dpkg-deb', '--info', "${ -> buildDist.outputs.files.filter(debFilter).singleFile}"
|
||||
exec.standardOutput = output
|
||||
doLast {
|
||||
String expectedLicense
|
||||
if (project.name.contains('oss-')) {
|
||||
expectedLicense = "ASL-2.0"
|
||||
} else {
|
||||
expectedLicense = "Elastic-License"
|
||||
check.dependsOn checkLicense
|
||||
if (project.name.contains('deb')) {
|
||||
checkLicense {
|
||||
onlyIf dpkgExists
|
||||
doLast {
|
||||
Path copyrightPath
|
||||
String expectedLicense
|
||||
String licenseFilename
|
||||
if (project.name.contains('oss-')) {
|
||||
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright")
|
||||
expectedLicense = "ASL-2.0"
|
||||
licenseFilename = "APACHE-LICENSE-2.0.txt"
|
||||
} else {
|
||||
copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright")
|
||||
expectedLicense = "Elastic-License"
|
||||
licenseFilename = "ELASTIC-LICENSE.txt"
|
||||
}
|
||||
final List<String> header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/",
|
||||
"Copyright: Elasticsearch B.V. <info@elastic.co>",
|
||||
"License: " + expectedLicense)
|
||||
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
|
||||
final List<String> expectedLines = header + licenseLines.collect { " " + it }
|
||||
assertLinesInFile(copyrightPath, expectedLines)
|
||||
}
|
||||
final Pattern pattern = Pattern.compile("\\s*License: (.+)")
|
||||
final String info = output.toString('UTF-8')
|
||||
final String[] actualLines = info.split("\n")
|
||||
int count = 0
|
||||
for (final String actualLine : actualLines) {
|
||||
final Matcher matcher = pattern.matcher(actualLine)
|
||||
if (matcher.matches()) {
|
||||
count++
|
||||
final String actualLicense = matcher.group(1)
|
||||
if (expectedLicense != actualLicense) {
|
||||
throw new GradleException("expected license [${expectedLicense} for package info but found [${actualLicense}]")
|
||||
}
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkLicense {
|
||||
onlyIf rpmExists
|
||||
doLast {
|
||||
String licenseFilename
|
||||
if (project.name.contains('oss-')) {
|
||||
licenseFilename = "APACHE-LICENSE-2.0.txt"
|
||||
} else {
|
||||
licenseFilename = "ELASTIC-LICENSE.txt"
|
||||
}
|
||||
final List<String> licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename))
|
||||
final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt")
|
||||
assertLinesInFile(licensePath, licenseLines)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task checkNotice {
|
||||
dependsOn buildDist, checkExtraction
|
||||
onlyIf {
|
||||
(project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it))
|
||||
}
|
||||
doLast {
|
||||
final List<String> noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch")
|
||||
final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt")
|
||||
assertLinesInFile(noticePath, noticeLines)
|
||||
}
|
||||
}
|
||||
check.dependsOn checkNotice
|
||||
|
||||
task checkLicenseMetadata(type: LoggedExec) {
|
||||
dependsOn buildDist, checkExtraction
|
||||
}
|
||||
check.dependsOn checkLicenseMetadata
|
||||
if (project.name.contains('deb')) {
|
||||
checkLicenseMetadata { LoggedExec exec ->
|
||||
onlyIf dpkgExists
|
||||
final ByteArrayOutputStream output = new ByteArrayOutputStream()
|
||||
exec.commandLine 'dpkg-deb', '--info', "${-> buildDist.outputs.files.filter(debFilter).singleFile}"
|
||||
exec.standardOutput = output
|
||||
doLast {
|
||||
String expectedLicense
|
||||
if (project.name.contains('oss-')) {
|
||||
expectedLicense = "ASL-2.0"
|
||||
} else {
|
||||
expectedLicense = "Elastic-License"
|
||||
}
|
||||
final Pattern pattern = Pattern.compile("\\s*License: (.+)")
|
||||
final String info = output.toString('UTF-8')
|
||||
final String[] actualLines = info.split("\n")
|
||||
int count = 0
|
||||
for (final String actualLine : actualLines) {
|
||||
final Matcher matcher = pattern.matcher(actualLine)
|
||||
if (matcher.matches()) {
|
||||
count++
|
||||
final String actualLicense = matcher.group(1)
|
||||
if (expectedLicense != actualLicense) {
|
||||
throw new GradleException("expected license [${expectedLicense} for package info but found [${actualLicense}]")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (count == 0) {
|
||||
throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}")
|
||||
}
|
||||
if (count > 1) {
|
||||
throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}")
|
||||
if (count == 0) {
|
||||
throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}")
|
||||
}
|
||||
if (count > 1) {
|
||||
throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkLicenseMetadata { LoggedExec exec ->
|
||||
onlyIf rpmExists
|
||||
final ByteArrayOutputStream output = new ByteArrayOutputStream()
|
||||
exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}"
|
||||
exec.standardOutput = output
|
||||
doLast {
|
||||
String license = output.toString('UTF-8')
|
||||
String expectedLicense
|
||||
if (project.name.contains('oss-')) {
|
||||
expectedLicense = "ASL 2.0"
|
||||
} else {
|
||||
expectedLicense = "Elastic License"
|
||||
}
|
||||
if (license != expectedLicense) {
|
||||
throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]")
|
||||
} else {
|
||||
assert project.name.contains('rpm')
|
||||
checkLicenseMetadata { LoggedExec exec ->
|
||||
onlyIf rpmExists
|
||||
final ByteArrayOutputStream output = new ByteArrayOutputStream()
|
||||
exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}"
|
||||
exec.standardOutput = output
|
||||
doLast {
|
||||
String license = output.toString('UTF-8')
|
||||
String expectedLicense
|
||||
if (project.name.contains('oss-')) {
|
||||
expectedLicense = "ASL 2.0"
|
||||
} else {
|
||||
expectedLicense = "Elastic License"
|
||||
}
|
||||
if (license != expectedLicense) {
|
||||
throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
@@ -43,7 +43,8 @@
# 10-:-XX:-UseConcMarkSweepGC
# 10-:-XX:-UseCMSInitiatingOccupancyOnly
# 10-:-XX:+UseG1GC
# 10-:-XX:InitiatingHeapOccupancyPercent=75
# 10-:-XX:G1ReservePercent=25
# 10-:-XX:InitiatingHeapOccupancyPercent=30

## DNS cache policy
# cache ttl in seconds for positive DNS lookups noting that this overrides the
@@ -580,7 +580,7 @@ include::ilm/remove_lifecycle_policy_from_index.asciidoc[]
== {transform-cap} APIs

:upid: {mainid}
:doc-tests-file: {doc-tests}/DataFrameTransformDocumentationIT.java
:doc-tests-file: {doc-tests}/TransformDocumentationIT.java

The Java High Level REST Client supports the following {transform}
APIs:
@@ -1,6 +1,6 @@
--
:api: delete-transform
:request: DeleteDataFrameTransformRequest
:request: DeleteTransformRequest
:response: AcknowledgedResponse
--
[role="xpack"]
@@ -1,7 +1,7 @@
--
:api: get-transform
:request: GetDataFrameTransformRequest
:response: GetDataFrameTransformResponse
:request: GetTransformRequest
:response: GetTransformResponse
--
[role="xpack"]
[id="{upid}-{api}"]
@@ -46,4 +46,4 @@ The returned +{response}+ contains the requested {transforms}.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
--------------------------------------------------
@@ -1,7 +1,7 @@
--
:api: get-transform-stats
:request: GetDataFrameTransformStatsRequest
:response: GetDataFrameTransformStatsResponse
:request: GetTransformStatsRequest
:response: GetTransformStatsResponse
--
[role="xpack"]
[id="{upid}-{api}"]
@@ -48,7 +48,7 @@ The returned +{response}+ contains the requested {transform} statistics.
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> The response contains a list of `DataFrameTransformStats` objects
<1> The response contains a list of `TransformStats` objects
<2> The running state of the {transform}, for example `started`, `indexing`, etc.
<3> The overall {transform} statistics recording the number of documents indexed etc.
<4> The progress of the current run in the {transform}. Supplies the number of docs left until the next checkpoint
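Editor's aside: the callouts above correspond to plain getter calls on the renamed client classes. Below is a minimal, hedged sketch of fetching and reading transform stats. Only the `TransformStats`, `GetTransformStatsRequest`, and `GetTransformStatsResponse` names and the individual getters come from the code shown in this commit; the `client.transform()` entry point and the `getTransformsStats()` response accessor are assumptions.

import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transform.GetTransformStatsRequest;
import org.elasticsearch.client.transform.GetTransformStatsResponse;
import org.elasticsearch.client.transform.transforms.TransformStats;

public class GetTransformStatsSketch {
    // Fetches stats for a single transform and prints the fields described by the callouts above.
    static void printStats(RestHighLevelClient client, String transformId) throws IOException {
        GetTransformStatsRequest request = new GetTransformStatsRequest(transformId);
        GetTransformStatsResponse response =
            client.transform().getTransformStats(request, RequestOptions.DEFAULT);    // entry point assumed
        for (TransformStats stats : response.getTransformsStats()) {                  // accessor name assumed
            System.out.println(stats.getId() + " state=" + stats.getState().value());                 // callout <2>
            System.out.println("documents indexed=" + stats.getIndexerStats().getNumDocuments());     // callout <3>
            if (stats.getCheckpointingInfo() != null) {                                                // callout <4>
                System.out.println("operations behind=" + stats.getCheckpointingInfo().getOperationsBehind());
            }
        }
    }
}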
@@ -1,7 +1,7 @@
--
:api: preview-transform
:request: PreviewDataFrameTransformRequest
:response: PreviewDataFrameTransformResponse
:request: PreviewTransformRequest
:response: PreviewTransformResponse
--
[role="xpack"]
[id="{upid}-{api}"]
@@ -1,6 +1,6 @@
--
:api: put-transform
:request: PutDataFrameTransformRequest
:request: PutTransformRequest
:response: AcknowledgedResponse
--
[role="xpack"]
@@ -29,7 +29,7 @@ such an error will not be visible until `_start` is called.
[id="{upid}-{api}-config"]
==== {transform-cap} configuration

The `DataFrameTransformConfig` object contains all the details about the
The `TransformConfig` object contains all the details about the
{transform} configuration and contains the following arguments:

["source","java",subs="attributes,callouts,macros"]
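Editor's aside: since this commit only renames `DataFrameTransformConfig` to `TransformConfig`, a minimal sketch of assembling a `TransformConfig` with the renamed classes may help orient readers. The transform id, index names, field names, and the pivot definition below are illustrative assumptions, not values taken from this commit; the builder methods follow the 7.x high-level REST client as I understand it.

import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.SourceConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.pivot.AggregationConfig;
import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;

public class TransformConfigSketch {
    // Builds an illustrative pivot transform: group by a hypothetical "user_id" field
    // and compute the average of a hypothetical "stars" field.
    static TransformConfig buildConfig() {
        GroupConfig groups = GroupConfig.builder()
            .groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build())
            .build();
        AggregatorFactories.Builder aggs = AggregatorFactories.builder()
            .addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
        PivotConfig pivot = PivotConfig.builder()
            .setGroups(groups)
            .setAggregationConfig(new AggregationConfig(aggs))
            .build();
        return TransformConfig.builder()
            .setId("example-transform")                                            // hypothetical id
            .setSource(SourceConfig.builder().setIndex("source-index").build())    // hypothetical source index
            .setDest(DestConfig.builder().setIndex("dest-index").build())          // hypothetical destination index
            .setFrequency(TimeValue.timeValueSeconds(15))
            .setPivotConfig(pivot)
            .setDescription("illustrative pivot transform")
            .build();
    }
}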
@@ -1,7 +1,7 @@
--
:api: start-transform
:request: StartDataFrameTransformRequest
:response: StartDataFrameTransformResponse
:request: StartTransformRequest
:response: StartTransformResponse
--
[role="xpack"]
[id="{upid}-{api}"]
@@ -37,4 +37,4 @@ include::../execution.asciidoc[]
==== Response

The returned +{response}+ object acknowledges the {transform} has
started.
started.
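Editor's aside: a hedged sketch of issuing the start call with the renamed request/response classes. The `client.transform()` entry point, the `startTransform` method name, and the `isAcknowledged()` accessor are assumptions based on the acknowledged-response pattern; only the class names come from this commit.

import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transform.StartTransformRequest;
import org.elasticsearch.client.transform.StartTransformResponse;

public class StartTransformSketch {
    // Starts a transform by id and reports whether the request was acknowledged.
    static boolean start(RestHighLevelClient client, String transformId) throws IOException {
        StartTransformRequest request = new StartTransformRequest(transformId);       // id is a placeholder
        StartTransformResponse response =
            client.transform().startTransform(request, RequestOptions.DEFAULT);       // entry point assumed
        return response.isAcknowledged();                                             // accessor assumed
    }
}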
Some files were not shown because too many files have changed in this diff.