Merge remote-tracking branch 'upstream/master' into index-lifecycle
This commit is contained in:
commit
c9e4d26a53
|
@ -1,87 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.tasks.InputFiles
|
||||
import org.gradle.api.tasks.OutputFile
|
||||
import org.gradle.api.tasks.SourceSet
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.api.tasks.util.PatternSet
|
||||
import org.gradle.api.tasks.util.PatternFilterable
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.attribute.PosixFilePermission
|
||||
import java.nio.file.attribute.PosixFileAttributeView
|
||||
|
||||
import static java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE
|
||||
import static java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE
|
||||
import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE
|
||||
|
||||
/**
|
||||
* Checks source files for correct file permissions.
|
||||
*/
|
||||
public class FilePermissionsTask extends DefaultTask {
|
||||
|
||||
/** A pattern set of which files should be checked. */
|
||||
private PatternFilterable filesFilter = new PatternSet()
|
||||
|
||||
@OutputFile
|
||||
File outputMarker = new File(project.buildDir, 'markers/filePermissions')
|
||||
|
||||
FilePermissionsTask() {
|
||||
onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) }
|
||||
description = "Checks java source files for correct file permissions"
|
||||
// we always include all source files, and exclude what should not be checked
|
||||
filesFilter.include('**')
|
||||
// exclude sh files that might have the executable bit set
|
||||
filesFilter.exclude('**/*.sh')
|
||||
}
|
||||
|
||||
/** Returns the files this task will check */
|
||||
@InputFiles
|
||||
FileCollection files() {
|
||||
List<FileCollection> collections = new ArrayList<>()
|
||||
for (SourceSet sourceSet : project.sourceSets) {
|
||||
collections.add(sourceSet.allSource.matching(filesFilter))
|
||||
}
|
||||
return project.files(collections.toArray())
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
void checkInvalidPermissions() {
|
||||
List<String> failures = new ArrayList<>()
|
||||
for (File f : files()) {
|
||||
PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(f.toPath(), PosixFileAttributeView.class)
|
||||
Set<PosixFilePermission> permissions = fileAttributeView.readAttributes().permissions()
|
||||
if (permissions.contains(OTHERS_EXECUTE) || permissions.contains(OWNER_EXECUTE) ||
|
||||
permissions.contains(GROUP_EXECUTE)) {
|
||||
failures.add("Source file is executable: " + f)
|
||||
}
|
||||
}
|
||||
if (failures.isEmpty() == false) {
|
||||
throw new GradleException('Found invalid file permissions:\n' + failures.join('\n'))
|
||||
}
|
||||
outputMarker.setText('done', 'UTF-8')
|
||||
}
|
||||
|
||||
}
|
|
@ -69,7 +69,11 @@ class ClusterConfiguration {
|
|||
*/
|
||||
@Input
|
||||
Closure<Integer> minimumMasterNodes = {
|
||||
return getNumNodes() > 1 ? getNumNodes() : -1
|
||||
if (bwcVersion != null && bwcVersion.before("6.5.0-SNAPSHOT")) {
|
||||
return numNodes > 1 ? numNodes : -1
|
||||
} else {
|
||||
return numNodes > 1 ? numNodes.intdiv(2) + 1 : -1
|
||||
}
|
||||
}
|
||||
|
||||
@Input
|
||||
|
|
|
@ -343,6 +343,13 @@ class ClusterFormationTasks {
|
|||
// this will also allow new and old nodes in the BWC case to become the master
|
||||
esConfig['discovery.initial_state_timeout'] = '0s'
|
||||
}
|
||||
if (esConfig.containsKey('discovery.zen.master_election.wait_for_joins_timeout') == false) {
|
||||
// If a node decides to become master based on partial information from the pinging, don't let it hang for 30 seconds to correct
|
||||
// its mistake. Instead, only wait 5s to do another round of pinging.
|
||||
// This is necessary since we use 30s as the default timeout in REST requests waiting for cluster formation
|
||||
// so we need to bail quicker than the default 30s for the cluster to form in time.
|
||||
esConfig['discovery.zen.master_election.wait_for_joins_timeout'] = '5s'
|
||||
}
|
||||
esConfig['node.max_local_storage_nodes'] = node.config.numNodes
|
||||
esConfig['http.port'] = node.config.httpPort
|
||||
esConfig['transport.tcp.port'] = node.config.transportPort
|
||||
|
|
|
@ -0,0 +1,114 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.attribute.PosixFileAttributeView;
|
||||
import java.nio.file.attribute.PosixFilePermission;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.tools.ant.taskdefs.condition.Os;
|
||||
import org.gradle.api.DefaultTask;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.api.file.FileCollection;
|
||||
import org.gradle.api.file.FileTree;
|
||||
import org.gradle.api.plugins.JavaPluginConvention;
|
||||
import org.gradle.api.tasks.InputFiles;
|
||||
import org.gradle.api.tasks.OutputFile;
|
||||
import org.gradle.api.tasks.SkipWhenEmpty;
|
||||
import org.gradle.api.tasks.SourceSetContainer;
|
||||
import org.gradle.api.tasks.StopExecutionException;
|
||||
import org.gradle.api.tasks.TaskAction;
|
||||
import org.gradle.api.tasks.util.PatternFilterable;
|
||||
import org.gradle.api.tasks.util.PatternSet;
|
||||
|
||||
/**
|
||||
* Checks source files for correct file permissions.
|
||||
*/
|
||||
public class FilePermissionsTask extends DefaultTask {
|
||||
|
||||
/**
|
||||
* A pattern set of which files should be checked.
|
||||
*/
|
||||
private final PatternFilterable filesFilter = new PatternSet()
|
||||
// we always include all source files, and exclude what should not be checked
|
||||
.include("**")
|
||||
// exclude sh files that might have the executable bit set
|
||||
.exclude("**/*.sh");
|
||||
|
||||
private File outputMarker = new File(getProject().getBuildDir(), "markers/filePermissions");
|
||||
|
||||
public FilePermissionsTask() {
|
||||
setDescription("Checks java source files for correct file permissions");
|
||||
}
|
||||
|
||||
private static boolean isExecutableFile(File file) {
|
||||
try {
|
||||
Set<PosixFilePermission> permissions = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class)
|
||||
.readAttributes()
|
||||
.permissions();
|
||||
return permissions.contains(PosixFilePermission.OTHERS_EXECUTE)
|
||||
|| permissions.contains(PosixFilePermission.OWNER_EXECUTE)
|
||||
|| permissions.contains(PosixFilePermission.GROUP_EXECUTE);
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException("unable to read the file " + file + " attributes", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the files this task will check
|
||||
*/
|
||||
@InputFiles
|
||||
@SkipWhenEmpty
|
||||
public FileCollection getFiles() {
|
||||
SourceSetContainer sourceSets = getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
|
||||
return sourceSets.stream()
|
||||
.map(sourceSet -> sourceSet.getAllSource().matching(filesFilter))
|
||||
.reduce(FileTree::plus)
|
||||
.orElse(getProject().files().getAsFileTree());
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
public void checkInvalidPermissions() throws IOException {
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
throw new StopExecutionException();
|
||||
}
|
||||
List<String> failures = getFiles().getFiles().stream()
|
||||
.filter(FilePermissionsTask::isExecutableFile)
|
||||
.map(file -> "Source file is executable: " + file)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (!failures.isEmpty()) {
|
||||
throw new GradleException("Found invalid file permissions:\n" + String.join("\n", failures));
|
||||
}
|
||||
|
||||
outputMarker.getParentFile().mkdirs();
|
||||
Files.write(outputMarker.toPath(), "done".getBytes("UTF-8"));
|
||||
}
|
||||
|
||||
@OutputFile
|
||||
public File getOutputMarker() {
|
||||
return outputMarker;
|
||||
}
|
||||
|
||||
}
|
|
@ -25,7 +25,6 @@
|
|||
Truly temporary suppressions suppression of snippets included in
|
||||
documentation that are so wide that they scroll.
|
||||
-->
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]CRUDDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]ClusterClientDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]GraphDocumentationIT.java" id="SnippetLength" />
|
||||
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]IndicesClientDocumentationIT.java" id="SnippetLength" />
|
||||
|
@ -78,39 +77,8 @@
|
|||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]TransportClusterStatsAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]TransportPendingClusterTasksAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BackoffPolicy.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkProcessor.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkResponse.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportBulkAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportShardBulkAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]DeleteRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]explain[/\\]TransportExplainAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]GetRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportGetAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportShardMultiGetAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]index[/\\]IndexRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]MultiSearchRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchPhaseController.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptions.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]TransportBroadcastAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]AcknowledgedRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]MasterNodeOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]MasterNodeReadOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]ClusterInfoRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]ClusterInfoRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]TransportClusterInfoAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]nodes[/\\]NodesOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]ReplicationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]TransportBroadcastReplicationAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]InstanceShardOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]SingleShardOperationRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]TransportSingleShardAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsResponse.java" checks="LineLength" />
|
||||
|
@ -121,10 +89,6 @@
|
|||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]TransportUpdateAction.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]FilterClient.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]support[/\\]AbstractClient.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClient.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateObserver.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateUpdateTask.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]DiffableUtils.java" checks="LineLength" />
|
||||
|
@ -159,15 +123,6 @@
|
|||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]AllocationCommands.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]command[/\\]MoveAllocationCommand.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]decider[/\\]AllocationDeciders.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]Discovery.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]DiscoverySettings.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscovery.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayAllocator.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayMetaState.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayService.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]LocalAllocateDangledIndices.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocator.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocator.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
|
||||
|
@ -291,21 +246,12 @@
|
|||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]SnapshotBlocksIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]ClusterStateRequestTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]ClusterStatsIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequestTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]RetryTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetShardRequestTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchRequestBuilderTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]WaitActiveShardCountIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeActionTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]TransportMasterNodeActionTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]BroadcastReplicationTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationActionTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]AbstractTermVectorsTestCase.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]GetTermVectorsIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsUnitTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterInfoServiceIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateDiffIT.java" checks="LineLength" />
|
||||
|
@ -376,20 +322,9 @@
|
|||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]shards[/\\]ClusterSearchShardsIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]structure[/\\]RoutingIteratorTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]BlockingClusterStatePublishResponseHandlerTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscoveryUnitTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]EnvironmentTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]NodeEnvironmentTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayServiceTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]MetaDataStateFormatTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]MetaDataWriteDataNodesIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocatorTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PriorityComparatorTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]QuorumGatewayIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]RecoveryFromGatewayIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocatorTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReusePeerRecoverySharedTest.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLogTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicySettingsTests.java" checks="LineLength" />
|
||||
|
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.Files;
|
||||
import java.util.List;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.RandomizedTest;
|
||||
import org.apache.tools.ant.taskdefs.condition.Os;
|
||||
import org.elasticsearch.gradle.test.GradleUnitTestCase;
|
||||
import org.gradle.api.GradleException;
|
||||
import org.gradle.api.Project;
|
||||
import org.gradle.api.plugins.JavaPlugin;
|
||||
import org.gradle.testfixtures.ProjectBuilder;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
|
||||
public class FilePermissionsTaskTests extends GradleUnitTestCase {
|
||||
@Rule
|
||||
public TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
|
||||
public void testCheckPermissionsWhenAnExecutableFileExists() throws Exception {
|
||||
RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));
|
||||
|
||||
Project project = createProject();
|
||||
|
||||
FilePermissionsTask filePermissionsTask = createTask(project);
|
||||
|
||||
File file = new File(project.getProjectDir(), "src/main/java/Code.java");
|
||||
file.getParentFile().mkdirs();
|
||||
file.createNewFile();
|
||||
file.setExecutable(true);
|
||||
|
||||
try {
|
||||
filePermissionsTask.checkInvalidPermissions();
|
||||
Assert.fail("the check should have failed because of the executable file permission");
|
||||
} catch (GradleException e) {
|
||||
assertTrue(e.getMessage().startsWith("Found invalid file permissions"));
|
||||
}
|
||||
file.delete();
|
||||
}
|
||||
|
||||
|
||||
public void testCheckPermissionsWhenNoFileExists() throws Exception {
|
||||
RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));
|
||||
|
||||
Project project = createProject();
|
||||
|
||||
FilePermissionsTask filePermissionsTask = createTask(project);
|
||||
|
||||
filePermissionsTask.checkInvalidPermissions();
|
||||
|
||||
File outputMarker = new File(project.getBuildDir(), "markers/filePermissions");
|
||||
List<String> result = Files.readAllLines(outputMarker.toPath(), Charset.forName("UTF-8"));
|
||||
assertEquals("done", result.get(0));
|
||||
}
|
||||
|
||||
public void testCheckPermissionsWhenNoExecutableFileExists() throws Exception {
|
||||
RandomizedTest.assumeFalse("Functionality is Unix specific", Os.isFamily(Os.FAMILY_WINDOWS));
|
||||
|
||||
Project project = createProject();
|
||||
|
||||
FilePermissionsTask filePermissionsTask = createTask(project);
|
||||
|
||||
File file = new File(project.getProjectDir(), "src/main/java/Code.java");
|
||||
file.getParentFile().mkdirs();
|
||||
file.createNewFile();
|
||||
|
||||
filePermissionsTask.checkInvalidPermissions();
|
||||
|
||||
File outputMarker = new File(project.getBuildDir(), "markers/filePermissions");
|
||||
List<String> result = Files.readAllLines(outputMarker.toPath(), Charset.forName("UTF-8"));
|
||||
assertEquals("done", result.get(0));
|
||||
|
||||
file.delete();
|
||||
|
||||
}
|
||||
|
||||
private Project createProject() throws IOException {
|
||||
Project project = ProjectBuilder.builder().withProjectDir(temporaryFolder.newFolder()).build();
|
||||
project.getPlugins().apply(JavaPlugin.class);
|
||||
return project;
|
||||
}
|
||||
|
||||
private FilePermissionsTask createTask(Project project) {
|
||||
return project.getTasks().create("filePermissionsTask", FilePermissionsTask.class);
|
||||
}
|
||||
}
|
|
@ -51,7 +51,6 @@ dependencies {
|
|||
compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
|
||||
compile "org.elasticsearch.plugin:rank-eval-client:${version}"
|
||||
compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
|
||||
bundle project(':x-pack:protocol')
|
||||
|
||||
testCompile "org.elasticsearch.client:test:${version}"
|
||||
testCompile "org.elasticsearch.test:framework:${version}"
|
||||
|
@ -88,6 +87,7 @@ integTestCluster {
|
|||
systemProperty 'es.scripting.update.ctx_in_params', 'false'
|
||||
setting 'xpack.license.self_generated.type', 'trial'
|
||||
setting 'xpack.security.enabled', 'true'
|
||||
setting 'xpack.security.authc.token.enabled', 'true'
|
||||
// Truststore settings are not used since TLS is not enabled. Included for testing the get certificates API
|
||||
setting 'xpack.ssl.certificate_authorities', 'testnode.crt'
|
||||
setting 'xpack.security.transport.ssl.truststore.path', 'testnode.jks'
|
||||
|
|
|
@ -20,8 +20,8 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.client.graph.GraphExploreResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -52,6 +52,7 @@ import org.elasticsearch.client.ml.PutDatafeedRequest;
|
|||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.StartDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.StopDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -209,6 +210,19 @@ final class MLRequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request updateDatafeed(UpdateDatafeedRequest updateDatafeedRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("ml")
|
||||
.addPathPartAsIs("datafeeds")
|
||||
.addPathPart(updateDatafeedRequest.getDatafeedUpdate().getId())
|
||||
.addPathPartAsIs("_update")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(updateDatafeedRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
|
|
|
@ -67,6 +67,7 @@ import org.elasticsearch.client.ml.StartDatafeedRequest;
|
|||
import org.elasticsearch.client.ml.StartDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.StopDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.StopDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.job.stats.JobStats;
|
||||
|
||||
|
@ -494,6 +495,46 @@ public final class MachineLearningClient {
|
|||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning Datafeed
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html">
|
||||
* ML Update datafeed documentation</a>
|
||||
*
|
||||
* @param request The UpdateDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return PutDatafeedResponse with enclosed, updated {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PutDatafeedResponse updateDatafeed(UpdateDatafeedRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::updateDatafeed,
|
||||
options,
|
||||
PutDatafeedResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning Datafeed asynchronously and notifies listener on completion
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html">
|
||||
* ML Update datafeed documentation</a>
|
||||
*
|
||||
* @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void updateDatafeedAsync(UpdateDatafeedRequest request, RequestOptions options, ActionListener<PutDatafeedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::updateDatafeed,
|
||||
options,
|
||||
PutDatafeedResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets one or more Machine Learning datafeed configuration info.
|
||||
*
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoRequest;
|
||||
|
||||
final class MigrationRequestConverters {
|
||||
|
||||
|
@ -28,7 +28,7 @@ final class MigrationRequestConverters {
|
|||
|
||||
static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
|
||||
RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack/migration/assistance")
|
||||
.addPathPartAsIs("_xpack", "migration", "assistance")
|
||||
.addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
|
||||
String endpoint = endpointBuilder.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
|
|
@ -706,10 +706,10 @@ final class RequestConverters {
|
|||
putParam("_source", Boolean.FALSE.toString());
|
||||
}
|
||||
if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) {
|
||||
putParam("_source_include", String.join(",", fetchSourceContext.includes()));
|
||||
putParam("_source_includes", String.join(",", fetchSourceContext.includes()));
|
||||
}
|
||||
if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) {
|
||||
putParam("_source_exclude", String.join(",", fetchSourceContext.excludes()));
|
||||
putParam("_source_excludes", String.join(",", fetchSourceContext.excludes()));
|
||||
}
|
||||
}
|
||||
return this;
|
||||
|
|
|
@ -28,6 +28,8 @@ import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
|||
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
@ -80,6 +82,40 @@ public class RollupClient {
|
|||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-start-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public StartRollupJobResponse startRollupJob(StartRollupJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
RollupRequestConverters::startJob,
|
||||
options,
|
||||
StartRollupJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously start a rollup job
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-start-job.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void startRollupJobAsync(StartRollupJobRequest request, RequestOptions options,
|
||||
ActionListener<StartRollupJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
RollupRequestConverters::startJob,
|
||||
options,
|
||||
StartRollupJobResponse::fromXContent,
|
||||
listener, Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a rollup job from the cluster
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-delete-job.html">
|
||||
|
|
|
@ -20,11 +20,13 @@ package org.elasticsearch.client;
|
|||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -38,9 +40,7 @@ final class RollupRequestConverters {
|
|||
|
||||
static Request putJob(final PutRollupJobRequest putRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("rollup")
|
||||
.addPathPartAsIs("job")
|
||||
.addPathPartAsIs("_xpack", "rollup", "job")
|
||||
.addPathPart(putRollupJobRequest.getConfig().getId())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
|
@ -48,11 +48,19 @@ final class RollupRequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request startJob(final StartRollupJobRequest startRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack", "rollup", "job")
|
||||
.addPathPart(startRollupJobRequest.getJobId())
|
||||
.addPathPartAsIs("_start")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getJob(final GetRollupJobRequest getRollupJobRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("rollup")
|
||||
.addPathPartAsIs("job")
|
||||
.addPathPartAsIs("_xpack", "rollup", "job")
|
||||
.addPathPart(getRollupJobRequest.getJobId())
|
||||
.build();
|
||||
return new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
@ -60,9 +68,7 @@ final class RollupRequestConverters {
|
|||
|
||||
static Request deleteJob(final DeleteRollupJobRequest deleteRollupJobRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("rollup")
|
||||
.addPathPartAsIs("job")
|
||||
.addPathPartAsIs("_xpack", "rollup", "job")
|
||||
.addPathPart(deleteRollupJobRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
@ -72,9 +78,7 @@ final class RollupRequestConverters {
|
|||
|
||||
static Request getRollupCaps(final GetRollupCapsRequest getRollupCapsRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("rollup")
|
||||
.addPathPartAsIs("data")
|
||||
.addPathPartAsIs("_xpack", "rollup", "data")
|
||||
.addPathPart(getRollupCapsRequest.getIndexPattern())
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
|
|
@ -20,22 +20,26 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||
import org.elasticsearch.client.security.ClearRolesCacheRequest;
|
||||
import org.elasticsearch.client.security.ClearRolesCacheResponse;
|
||||
import org.elasticsearch.client.security.CreateTokenRequest;
|
||||
import org.elasticsearch.client.security.CreateTokenResponse;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingResponse;
|
||||
import org.elasticsearch.client.security.DeleteRoleRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleResponse;
|
||||
import org.elasticsearch.client.security.PutRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.PutRoleMappingResponse;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EmptyResponse;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsRequest;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsResponse;
|
||||
import org.elasticsearch.client.security.GetSslCertificatesRequest;
|
||||
import org.elasticsearch.client.security.GetSslCertificatesResponse;
|
||||
import org.elasticsearch.client.security.PutRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.PutRoleMappingResponse;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserResponse;
|
||||
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -112,6 +116,40 @@ public final class SecurityClient {
|
|||
PutRoleMappingResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronously get role mapping(s).
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html">
|
||||
* the docs</a> for more.
|
||||
*
|
||||
* @param request {@link GetRoleMappingsRequest} with role mapping name(s).
|
||||
* If no role mapping name is provided then retrieves all role mappings.
|
||||
* @param options the request options (e.g. headers), use
|
||||
* {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response from the get role mapping call
|
||||
* @throws IOException in case there is a problem sending the request or
|
||||
* parsing back the response
|
||||
*/
|
||||
public GetRoleMappingsResponse getRoleMappings(final GetRoleMappingsRequest request, final RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::getRoleMappings,
|
||||
options, GetRoleMappingsResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously get role mapping(s).
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html">
|
||||
* the docs</a> for more.
|
||||
*
|
||||
* @param request {@link GetRoleMappingsRequest} with role mapping name(s).
|
||||
* If no role mapping name is provided then retrieves all role mappings.
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getRoleMappingsAsync(final GetRoleMappingsRequest request, final RequestOptions options,
|
||||
final ActionListener<GetRoleMappingsResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::getRoleMappings,
|
||||
options, GetRoleMappingsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable a native realm or built-in user synchronously.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
|
||||
|
@ -314,4 +352,32 @@ public final class SecurityClient {
|
|||
DeleteRoleResponse::fromXContent, listener, singleton(404));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an OAuth2 token.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-token.html">
|
||||
* the docs</a> for more.
|
||||
*
|
||||
* @param request the request for the token
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response from the create token call
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public CreateTokenResponse createToken(CreateTokenRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::createToken, options,
|
||||
CreateTokenResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously creates an OAuth2 token.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-token.html">
|
||||
* the docs</a> for more.
|
||||
*
|
||||
* @param request the request for the token
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void createTokenAsync(CreateTokenRequest request, RequestOptions options, ActionListener<CreateTokenResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::createToken, options,
|
||||
CreateTokenResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,18 +19,22 @@
|
|||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.security.ClearRolesCacheRequest;
|
||||
import org.elasticsearch.client.security.CreateTokenRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleRequest;
|
||||
import org.elasticsearch.client.security.PutRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsRequest;
|
||||
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
import org.elasticsearch.client.security.SetUserEnabledRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -78,6 +82,15 @@ final class SecurityRequestConverters {
|
|||
return request;
|
||||
}
|
||||
|
||||
static Request getRoleMappings(final GetRoleMappingsRequest getRoleMappingRequest) throws IOException {
|
||||
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder();
|
||||
builder.addPathPartAsIs("_xpack/security/role_mapping");
|
||||
if (getRoleMappingRequest.getRoleMappingNames().size() > 0) {
|
||||
builder.addPathPart(Strings.collectionToCommaDelimitedString(getRoleMappingRequest.getRoleMappingNames()));
|
||||
}
|
||||
return new Request(HttpGet.METHOD_NAME, builder.build());
|
||||
}
|
||||
|
||||
static Request enableUser(EnableUserRequest enableUserRequest) {
|
||||
return setUserEnabled(enableUserRequest);
|
||||
}
|
||||
|
@ -128,4 +141,10 @@ final class SecurityRequestConverters {
|
|||
params.withRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request createToken(CreateTokenRequest createTokenRequest) throws IOException {
|
||||
Request request = new Request(HttpPost.METHOD_NAME, "/_xpack/security/oauth2/token");
|
||||
request.setEntity(createEntity(createTokenRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,10 +28,10 @@ import org.elasticsearch.client.watcher.AckWatchRequest;
|
|||
import org.elasticsearch.client.watcher.AckWatchResponse;
|
||||
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
|
||||
import org.elasticsearch.client.watcher.StopWatchServiceRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -30,8 +30,8 @@ import org.elasticsearch.client.watcher.AckWatchRequest;
|
|||
import org.elasticsearch.client.watcher.StartWatchServiceRequest;
|
||||
import org.elasticsearch.client.watcher.StopWatchServiceRequest;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
|
||||
final class WatcherRequestConverters {
|
||||
|
||||
|
@ -59,9 +59,7 @@ final class WatcherRequestConverters {
|
|||
|
||||
static Request putWatch(PutWatchRequest putWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
.addPathPart(putWatchRequest.getId())
|
||||
.build();
|
||||
|
||||
|
@ -89,9 +87,7 @@ final class WatcherRequestConverters {
|
|||
|
||||
static Request deleteWatch(DeleteWatchRequest deleteWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
.addPathPart(deleteWatchRequest.getId())
|
||||
.build();
|
||||
|
||||
|
@ -101,9 +97,7 @@ final class WatcherRequestConverters {
|
|||
|
||||
public static Request ackWatch(AckWatchRequest ackWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
.addPathPart(ackWatchRequest.getWatchId())
|
||||
.addPathPartAsIs("_ack")
|
||||
.addCommaSeparatedPathParts(ackWatchRequest.getActionIds())
|
||||
|
@ -114,9 +108,7 @@ final class WatcherRequestConverters {
|
|||
|
||||
static Request activateWatch(ActivateWatchRequest activateWatchRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPartAsIs("_xpack", "watcher", "watch")
|
||||
.addPathPart(activateWatchRequest.getWatchId())
|
||||
.addPathPartAsIs("_activate")
|
||||
.build();
|
||||
|
|
|
@ -16,22 +16,19 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent.Params;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;
|
||||
import org.elasticsearch.client.graph.Vertex.VertexId;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
@ -55,25 +52,9 @@ public class Connection {
|
|||
this.docCount = docCount;
|
||||
}
|
||||
|
||||
public Connection(StreamInput in, Map<VertexId, Vertex> vertices) throws IOException {
|
||||
from = vertices.get(new VertexId(in.readString(), in.readString()));
|
||||
to = vertices.get(new VertexId(in.readString(), in.readString()));
|
||||
weight = in.readDouble();
|
||||
docCount = in.readVLong();
|
||||
}
|
||||
|
||||
Connection() {
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(from.getField());
|
||||
out.writeString(from.getTerm());
|
||||
out.writeString(to.getField());
|
||||
out.writeString(to.getTerm());
|
||||
out.writeDouble(weight);
|
||||
out.writeVLong(docCount);
|
||||
}
|
||||
|
||||
public ConnectionId getId() {
|
||||
return new ConnectionId(from.getId(), to.getId());
|
||||
}
|
|
@ -16,13 +16,12 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.ValidateActions;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -37,14 +36,14 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
|
|||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Holds the criteria required to guide the exploration of connected terms which
|
||||
* can be returned as a graph.
|
||||
*/
|
||||
public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject {
|
||||
public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXContentObject, Validatable {
|
||||
|
||||
public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop";
|
||||
public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest";
|
||||
|
@ -74,15 +73,15 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
|
|||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = null;
|
||||
public Optional<ValidationException> validate() {
|
||||
ValidationException validationException = new ValidationException();
|
||||
if (hops.size() == 0) {
|
||||
validationException = ValidateActions.addValidationError(NO_HOPS_ERROR_MESSAGE, validationException);
|
||||
validationException.addValidationError(NO_HOPS_ERROR_MESSAGE);
|
||||
}
|
||||
for (Hop hop : hops) {
|
||||
validationException = hop.validate(validationException);
|
||||
hop.validate(validationException);
|
||||
}
|
||||
return validationException;
|
||||
return validationException.validationErrors().isEmpty() ? Optional.empty() : Optional.of(validationException);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -159,55 +158,6 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest
|
|||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
|
||||
indices = in.readStringArray();
|
||||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
types = in.readStringArray();
|
||||
routing = in.readOptionalString();
|
||||
timeout = in.readOptionalTimeValue();
|
||||
sampleSize = in.readInt();
|
||||
sampleDiversityField = in.readOptionalString();
|
||||
maxDocsPerDiversityValue = in.readInt();
|
||||
|
||||
useSignificance = in.readBoolean();
|
||||
returnDetailedInfo = in.readBoolean();
|
||||
|
||||
int numHops = in.readInt();
|
||||
Hop parentHop = null;
|
||||
for (int i = 0; i < numHops; i++) {
|
||||
Hop hop = new Hop(parentHop);
|
||||
hop.readFrom(in);
|
||||
hops.add(hop);
|
||||
parentHop = hop;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeStringArray(indices);
|
||||
indicesOptions.writeIndicesOptions(out);
|
||||
out.writeStringArray(types);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalTimeValue(timeout);
|
||||
|
||||
out.writeInt(sampleSize);
|
||||
out.writeOptionalString(sampleDiversityField);
|
||||
out.writeInt(maxDocsPerDiversityValue);
|
||||
|
||||
out.writeBoolean(useSignificance);
|
||||
out.writeBoolean(returnDetailedInfo);
|
||||
out.writeInt(hops.size());
|
||||
for (Iterator<Hop> iterator = hops.iterator(); iterator.hasNext();) {
|
||||
Hop hop = iterator.next();
|
||||
hop.writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]";
|
|
@ -16,24 +16,21 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.search.ShardSearchFailure;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId;
|
||||
import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection;
|
||||
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;
|
||||
import org.elasticsearch.client.graph.Connection.ConnectionId;
|
||||
import org.elasticsearch.client.graph.Connection.UnresolvedConnection;
|
||||
import org.elasticsearch.client.graph.Vertex.VertexId;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
@ -41,7 +38,6 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
|
@ -51,7 +47,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
|
|||
*
|
||||
* @see GraphExploreRequest
|
||||
*/
|
||||
public class GraphExploreResponse extends ActionResponse implements ToXContentObject {
|
||||
public class GraphExploreResponse implements ToXContentObject {
|
||||
|
||||
private long tookInMillis;
|
||||
private boolean timedOut = false;
|
||||
|
@ -94,41 +90,6 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb
|
|||
return shardFailures;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
tookInMillis = in.readVLong();
|
||||
timedOut = in.readBoolean();
|
||||
|
||||
int size = in.readVInt();
|
||||
if (size == 0) {
|
||||
shardFailures = ShardSearchFailure.EMPTY_ARRAY;
|
||||
} else {
|
||||
shardFailures = new ShardSearchFailure[size];
|
||||
for (int i = 0; i < shardFailures.length; i++) {
|
||||
shardFailures[i] = readShardSearchFailure(in);
|
||||
}
|
||||
}
|
||||
// read vertices
|
||||
size = in.readVInt();
|
||||
vertices = new HashMap<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
Vertex n = Vertex.readFrom(in);
|
||||
vertices.put(n.getId(), n);
|
||||
}
|
||||
|
||||
size = in.readVInt();
|
||||
|
||||
connections = new HashMap<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
Connection e = new Connection(in, vertices);
|
||||
connections.put(e.getId(), e);
|
||||
}
|
||||
|
||||
returnDetailedInfo = in.readBoolean();
|
||||
|
||||
}
|
||||
|
||||
public Collection<Connection> getConnections() {
|
||||
return connections.values();
|
||||
}
|
||||
|
@ -141,31 +102,6 @@ public class GraphExploreResponse extends ActionResponse implements ToXContentOb
|
|||
return vertices.get(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeVLong(tookInMillis);
|
||||
out.writeBoolean(timedOut);
|
||||
|
||||
out.writeVInt(shardFailures.length);
|
||||
for (ShardOperationFailedException shardSearchFailure : shardFailures) {
|
||||
shardSearchFailure.writeTo(out);
|
||||
}
|
||||
|
||||
out.writeVInt(vertices.size());
|
||||
for (Vertex vertex : vertices.values()) {
|
||||
vertex.writeTo(out);
|
||||
}
|
||||
|
||||
out.writeVInt(connections.size());
|
||||
for (Connection connection : connections.values()) {
|
||||
connection.writeTo(out);
|
||||
}
|
||||
|
||||
out.writeBoolean(returnDetailedInfo);
|
||||
|
||||
}
|
||||
|
||||
private static final ParseField TOOK = new ParseField("took");
|
||||
private static final ParseField TIMED_OUT = new ParseField("timed_out");
|
||||
private static final ParseField VERTICES = new ParseField("vertices");
|
|
@ -16,12 +16,9 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ValidateActions;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
|
@ -56,7 +53,7 @@ import java.util.List;
|
|||
* </p>
|
||||
*
|
||||
*/
|
||||
public class Hop implements ToXContentFragment{
|
||||
public class Hop implements ToXContentFragment {
|
||||
final Hop parentHop;
|
||||
List<VertexRequest> vertices = null;
|
||||
QueryBuilder guidingQuery = null;
|
||||
|
@ -65,44 +62,16 @@ public class Hop implements ToXContentFragment{
|
|||
this.parentHop = parent;
|
||||
}
|
||||
|
||||
public ActionRequestValidationException validate(ActionRequestValidationException validationException) {
|
||||
|
||||
public void validate(ValidationException validationException) {
|
||||
if (getEffectiveVertexRequests().size() == 0) {
|
||||
validationException = ValidateActions.addValidationError(GraphExploreRequest.NO_VERTICES_ERROR_MESSAGE, validationException);
|
||||
validationException.addValidationError(GraphExploreRequest.NO_VERTICES_ERROR_MESSAGE);
|
||||
}
|
||||
return validationException;
|
||||
|
||||
}
|
||||
|
||||
public Hop getParentHop() {
|
||||
return parentHop;
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeOptionalNamedWriteable(guidingQuery);
|
||||
if (vertices == null) {
|
||||
out.writeVInt(0);
|
||||
} else {
|
||||
out.writeVInt(vertices.size());
|
||||
for (VertexRequest vr : vertices) {
|
||||
vr.writeTo(out);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void readFrom(StreamInput in) throws IOException {
|
||||
guidingQuery = in.readOptionalNamedWriteable(QueryBuilder.class);
|
||||
int size = in.readVInt();
|
||||
if (size > 0) {
|
||||
vertices = new ArrayList<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
VertexRequest vr = new VertexRequest();
|
||||
vr.readFrom(in);
|
||||
vertices.add(vr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public QueryBuilder guidingQuery() {
|
||||
if (guidingQuery != null) {
|
||||
return guidingQuery;
|
|
@ -16,11 +16,9 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -64,19 +62,6 @@ public class Vertex implements ToXContentFragment {
|
|||
this.bg = bg;
|
||||
this.fg = fg;
|
||||
}
|
||||
|
||||
static Vertex readFrom(StreamInput in) throws IOException {
|
||||
return new Vertex(in.readString(), in.readString(), in.readDouble(), in.readVInt(), in.readVLong(), in.readVLong());
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(field);
|
||||
out.writeString(term);
|
||||
out.writeDouble(weight);
|
||||
out.writeVInt(depth);
|
||||
out.writeVLong(bg);
|
||||
out.writeVLong(fg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
|
@ -16,13 +16,11 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest.TermBoost;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
|
@ -52,57 +50,6 @@ public class VertexRequest implements ToXContentObject {
|
|||
|
||||
}
|
||||
|
||||
void readFrom(StreamInput in) throws IOException {
|
||||
fieldName = in.readString();
|
||||
size = in.readVInt();
|
||||
minDocCount = in.readVInt();
|
||||
shardMinDocCount = in.readVInt();
|
||||
|
||||
int numIncludes = in.readVInt();
|
||||
if (numIncludes > 0) {
|
||||
includes = new HashMap<>();
|
||||
for (int i = 0; i < numIncludes; i++) {
|
||||
TermBoost tb = new TermBoost();
|
||||
tb.readFrom(in);
|
||||
includes.put(tb.term, tb);
|
||||
}
|
||||
}
|
||||
|
||||
int numExcludes = in.readVInt();
|
||||
if (numExcludes > 0) {
|
||||
excludes = new HashSet<>();
|
||||
for (int i = 0; i < numExcludes; i++) {
|
||||
excludes.add(in.readString());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(fieldName);
|
||||
out.writeVInt(size);
|
||||
out.writeVInt(minDocCount);
|
||||
out.writeVInt(shardMinDocCount);
|
||||
|
||||
if (includes != null) {
|
||||
out.writeVInt(includes.size());
|
||||
for (TermBoost tb : includes.values()) {
|
||||
tb.writeTo(out);
|
||||
}
|
||||
} else {
|
||||
out.writeVInt(0);
|
||||
}
|
||||
|
||||
if (excludes != null) {
|
||||
out.writeVInt(excludes.size());
|
||||
for (String term : excludes) {
|
||||
out.writeString(term);
|
||||
}
|
||||
} else {
|
||||
out.writeVInt(0);
|
||||
}
|
||||
}
|
||||
|
||||
public String fieldName() {
|
||||
return fieldName;
|
||||
}
|
||||
|
@ -224,7 +171,7 @@ public class VertexRequest implements ToXContentObject {
|
|||
if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) {
|
||||
builder.field("shard_min_doc_count", shardMinDocCount);
|
||||
}
|
||||
if(includes!=null) {
|
||||
if (includes != null) {
|
||||
builder.startArray("include");
|
||||
for (TermBoost tb : includes.values()) {
|
||||
builder.startObject();
|
||||
|
@ -234,7 +181,7 @@ public class VertexRequest implements ToXContentObject {
|
|||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if(excludes!=null) {
|
||||
if (excludes != null) {
|
||||
builder.startArray("exclude");
|
||||
for (String value : excludes) {
|
||||
builder.value(value);
|
|
@ -21,4 +21,4 @@
|
|||
* Request and Response objects for the default distribution's Graph
|
||||
* APIs.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
|
@ -16,21 +16,17 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.migration;
|
||||
package org.elasticsearch.client.migration;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
||||
import org.elasticsearch.client.TimedRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeInfoRequest> implements IndicesRequest.Replaceable {
|
||||
public class IndexUpgradeInfoRequest extends TimedRequest implements IndicesRequest.Replaceable {
|
||||
|
||||
private String[] indices = Strings.EMPTY_ARRAY;
|
||||
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);
|
||||
|
@ -39,19 +35,6 @@ public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeI
|
|||
indices(indices);
|
||||
}
|
||||
|
||||
public IndexUpgradeInfoRequest(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
indices = in.readStringArray();
|
||||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeStringArray(indices);
|
||||
indicesOptions.writeIndicesOptions(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] indices() {
|
||||
return indices;
|
||||
|
@ -72,16 +55,6 @@ public class IndexUpgradeInfoRequest extends MasterNodeReadRequest<IndexUpgradeI
|
|||
this.indicesOptions = indicesOptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
|
@ -16,25 +16,19 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.migration;
|
||||
package org.elasticsearch.client.migration;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject {
|
||||
public class IndexUpgradeInfoResponse {
|
||||
|
||||
private static final ParseField INDICES = new ParseField("indices");
|
||||
private static final ParseField ACTION_REQUIRED = new ParseField("action_required");
|
||||
|
@ -70,50 +64,16 @@ public class IndexUpgradeInfoResponse extends ActionResponse implements ToXConte
|
|||
}
|
||||
|
||||
|
||||
private Map<String, UpgradeActionRequired> actions;
|
||||
|
||||
public IndexUpgradeInfoResponse() {
|
||||
|
||||
}
|
||||
private final Map<String, UpgradeActionRequired> actions;
|
||||
|
||||
public IndexUpgradeInfoResponse(Map<String, UpgradeActionRequired> actions) {
|
||||
this.actions = actions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
|
||||
}
|
||||
|
||||
public Map<String, UpgradeActionRequired> getActions() {
|
||||
return actions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startObject(INDICES.getPreferredName());
|
||||
for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) {
|
||||
builder.startObject(entry.getKey());
|
||||
{
|
||||
builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.migration;
|
||||
package org.elasticsearch.client.migration;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
|
@ -21,4 +21,4 @@
|
|||
* Request and Response objects for the default distribution's Migration
|
||||
* APIs.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.migration;
|
||||
package org.elasticsearch.client.migration;
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate}
|
||||
* settings
|
||||
*/
|
||||
public class UpdateDatafeedRequest extends ActionRequest implements ToXContentObject {
|
||||
|
||||
private final DatafeedUpdate update;
|
||||
|
||||
public UpdateDatafeedRequest(DatafeedUpdate update) {
|
||||
this.update = update;
|
||||
}
|
||||
|
||||
public DatafeedUpdate getDatafeedUpdate() {
|
||||
return update;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return update.toXContent(builder, params);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
UpdateDatafeedRequest that = (UpdateDatafeedRequest) o;
|
||||
return Objects.equals(update, that.update);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(update);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return Strings.toString(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
return null;
|
||||
}
|
||||
}
|
|
@ -37,6 +37,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
@ -292,6 +293,10 @@ public class DatafeedUpdate implements ToXContentObject {
|
|||
return this;
|
||||
}
|
||||
|
||||
public Builder setIndices(String... indices) {
|
||||
return setIndices(Arrays.asList(indices));
|
||||
}
|
||||
|
||||
public Builder setTypes(List<String> types) {
|
||||
this.types = types;
|
||||
return this;
|
||||
|
|
|
@ -19,14 +19,21 @@
|
|||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
public abstract class AcknowledgedResponse implements ToXContentObject {
|
||||
|
||||
protected static final String PARSE_FIELD_NAME = "acknowledged";
|
||||
private final boolean acknowledged;
|
||||
|
||||
public AcknowledgedResponse(final boolean acknowledged) {
|
||||
|
@ -37,6 +44,12 @@ public abstract class AcknowledgedResponse implements ToXContentObject {
|
|||
return acknowledged;
|
||||
}
|
||||
|
||||
protected static <T> ConstructingObjectParser<T, Void> generateParser(String name, Function<Boolean, T> ctor, String parseField) {
|
||||
ConstructingObjectParser<T, Void> p = new ConstructingObjectParser<>(name, true, args -> ctor.apply((boolean) args[0]));
|
||||
p.declareBoolean(constructorArg(), new ParseField(parseField));
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
|
@ -58,10 +71,16 @@ public abstract class AcknowledgedResponse implements ToXContentObject {
|
|||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field("acknowledged", isAcknowledged());
|
||||
builder.field(getFieldName(), isAcknowledged());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the field name this response uses to output the acknowledged flag
|
||||
*/
|
||||
protected String getFieldName() {
|
||||
return PARSE_FIELD_NAME;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,28 +19,21 @@
|
|||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
public class DeleteRollupJobResponse extends AcknowledgedResponse {
|
||||
|
||||
public DeleteRollupJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<DeleteRollupJobResponse, Void> PARSER = AcknowledgedResponse
|
||||
.generateParser("delete_rollup_job_response", DeleteRollupJobResponse::new, AcknowledgedResponse.PARSE_FIELD_NAME);
|
||||
|
||||
public static DeleteRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<DeleteRollupJobResponse, Void> PARSER
|
||||
= new ConstructingObjectParser<>("delete_rollup_job_response", true,
|
||||
args -> new DeleteRollupJobResponse((boolean) args[0]));
|
||||
static {
|
||||
PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,28 +18,21 @@
|
|||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
public class PutRollupJobResponse extends AcknowledgedResponse {
|
||||
|
||||
|
||||
public PutRollupJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER = AcknowledgedResponse
|
||||
.generateParser("delete_rollup_job_response", PutRollupJobResponse::new, AcknowledgedResponse.PARSE_FIELD_NAME);
|
||||
|
||||
public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER
|
||||
= new ConstructingObjectParser<>("put_rollup_job_response", true, args -> new PutRollupJobResponse((boolean) args[0]));
|
||||
static {
|
||||
PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class StartRollupJobRequest implements Validatable {
|
||||
|
||||
private final String jobId;
|
||||
|
||||
public StartRollupJobRequest(final String jobId) {
|
||||
this.jobId = Objects.requireNonNull(jobId, "id parameter must not be null");
|
||||
}
|
||||
|
||||
public String getJobId() {
|
||||
return jobId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final StartRollupJobRequest that = (StartRollupJobRequest) o;
|
||||
return Objects.equals(jobId, that.jobId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(jobId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class StartRollupJobResponse extends AcknowledgedResponse {
|
||||
|
||||
private static final String PARSE_FIELD_NAME = "started";
|
||||
|
||||
private static final ConstructingObjectParser<StartRollupJobResponse, Void> PARSER = AcknowledgedResponse
|
||||
.generateParser("delete_rollup_job_response", StartRollupJobResponse::new, PARSE_FIELD_NAME);
|
||||
|
||||
public StartRollupJobResponse(boolean acknowledged) {
|
||||
super(acknowledged);
|
||||
}
|
||||
|
||||
public static StartRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getFieldName() {
|
||||
return PARSE_FIELD_NAME;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,152 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.CharArrays;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Request to create a new OAuth2 token from the Elasticsearch cluster.
|
||||
*/
|
||||
public final class CreateTokenRequest implements Validatable, ToXContentObject {
|
||||
|
||||
private final String grantType;
|
||||
private final String scope;
|
||||
private final String username;
|
||||
private final char[] password;
|
||||
private final String refreshToken;
|
||||
|
||||
/**
|
||||
* General purpose constructor. This constructor is typically not useful, and one of the following factory methods should be used
|
||||
* instead:
|
||||
* <ul>
|
||||
* <li>{@link #passwordGrant(String, char[])}</li>
|
||||
* <li>{@link #refreshTokenGrant(String)}</li>
|
||||
* <li>{@link #clientCredentialsGrant()}</li>
|
||||
* </ul>
|
||||
*/
|
||||
public CreateTokenRequest(String grantType, @Nullable String scope, @Nullable String username, @Nullable char[] password,
|
||||
@Nullable String refreshToken) {
|
||||
if (Strings.isNullOrEmpty(grantType)) {
|
||||
throw new IllegalArgumentException("grant_type is required");
|
||||
}
|
||||
this.grantType = grantType;
|
||||
this.username = username;
|
||||
this.password = password;
|
||||
this.scope = scope;
|
||||
this.refreshToken = refreshToken;
|
||||
}
|
||||
|
||||
public static CreateTokenRequest passwordGrant(String username, char[] password) {
|
||||
if (Strings.isNullOrEmpty(username)) {
|
||||
throw new IllegalArgumentException("username is required");
|
||||
}
|
||||
if (password == null || password.length == 0) {
|
||||
throw new IllegalArgumentException("password is required");
|
||||
}
|
||||
return new CreateTokenRequest("password", null, username, password, null);
|
||||
}
|
||||
|
||||
public static CreateTokenRequest refreshTokenGrant(String refreshToken) {
|
||||
if (Strings.isNullOrEmpty(refreshToken)) {
|
||||
throw new IllegalArgumentException("refresh_token is required");
|
||||
}
|
||||
return new CreateTokenRequest("refresh_token", null, null, null, refreshToken);
|
||||
}
|
||||
|
||||
public static CreateTokenRequest clientCredentialsGrant() {
|
||||
return new CreateTokenRequest("client_credentials", null, null, null, null);
|
||||
}
|
||||
|
||||
public String getGrantType() {
|
||||
return grantType;
|
||||
}
|
||||
|
||||
public String getScope() {
|
||||
return scope;
|
||||
}
|
||||
|
||||
public String getUsername() {
|
||||
return username;
|
||||
}
|
||||
|
||||
public char[] getPassword() {
|
||||
return password;
|
||||
}
|
||||
|
||||
public String getRefreshToken() {
|
||||
return refreshToken;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject()
|
||||
.field("grant_type", grantType);
|
||||
if (scope != null) {
|
||||
builder.field("scope", scope);
|
||||
}
|
||||
if (username != null) {
|
||||
builder.field("username", username);
|
||||
}
|
||||
if (password != null) {
|
||||
byte[] passwordBytes = CharArrays.toUtf8Bytes(password);
|
||||
try {
|
||||
builder.field("password").utf8Value(passwordBytes, 0, passwordBytes.length);
|
||||
} finally {
|
||||
Arrays.fill(passwordBytes, (byte) 0);
|
||||
}
|
||||
}
|
||||
if (refreshToken != null) {
|
||||
builder.field("refresh_token", refreshToken);
|
||||
}
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final CreateTokenRequest that = (CreateTokenRequest) o;
|
||||
return Objects.equals(grantType, that.grantType) &&
|
||||
Objects.equals(scope, that.scope) &&
|
||||
Objects.equals(username, that.username) &&
|
||||
Arrays.equals(password, that.password) &&
|
||||
Objects.equals(refreshToken, that.refreshToken);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Objects.hash(grantType, scope, username, refreshToken);
|
||||
result = 31 * result + Arrays.hashCode(password);
|
||||
return result;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,110 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
/**
|
||||
* Response when creating a new OAuth2 token in the Elasticsearch cluster. Contains an access token, the token's expiry, and an optional
|
||||
* refresh token.
|
||||
*/
|
||||
public final class CreateTokenResponse {
|
||||
|
||||
private final String accessToken;
|
||||
private final String type;
|
||||
private final TimeValue expiresIn;
|
||||
private final String scope;
|
||||
private final String refreshToken;
|
||||
|
||||
public CreateTokenResponse(String accessToken, String type, TimeValue expiresIn, String scope, String refreshToken) {
|
||||
this.accessToken = accessToken;
|
||||
this.type = type;
|
||||
this.expiresIn = expiresIn;
|
||||
this.scope = scope;
|
||||
this.refreshToken = refreshToken;
|
||||
}
|
||||
|
||||
public String getAccessToken() {
|
||||
return accessToken;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public TimeValue getExpiresIn() {
|
||||
return expiresIn;
|
||||
}
|
||||
|
||||
public String getScope() {
|
||||
return scope;
|
||||
}
|
||||
|
||||
public String getRefreshToken() {
|
||||
return refreshToken;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final CreateTokenResponse that = (CreateTokenResponse) o;
|
||||
return Objects.equals(accessToken, that.accessToken) &&
|
||||
Objects.equals(type, that.type) &&
|
||||
Objects.equals(expiresIn, that.expiresIn) &&
|
||||
Objects.equals(scope, that.scope) &&
|
||||
Objects.equals(refreshToken, that.refreshToken);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(accessToken, type, expiresIn, scope, refreshToken);
|
||||
}
|
||||
|
||||
private static final ConstructingObjectParser<CreateTokenResponse, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"create_token_response", true, args -> new CreateTokenResponse(
|
||||
(String) args[0], (String) args[1], TimeValue.timeValueSeconds((Long) args[2]), (String) args[3], (String) args[4]));
|
||||
|
||||
static {
|
||||
PARSER.declareString(constructorArg(), new ParseField("access_token"));
|
||||
PARSER.declareString(constructorArg(), new ParseField("type"));
|
||||
PARSER.declareLong(constructorArg(), new ParseField("expires_in"));
|
||||
PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("scope"));
|
||||
PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("refresh_token"));
|
||||
}
|
||||
|
||||
public static CreateTokenResponse fromXContent(XContentParser parser) throws IOException {
|
||||
return PARSER.parse(parser, null);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,153 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
|
||||
import org.elasticsearch.client.security.support.expressiondsl.parser.RoleMapperExpressionParser;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
/**
|
||||
* A representation of a single role-mapping.
|
||||
*
|
||||
* @see RoleMapperExpression
|
||||
* @see RoleMapperExpressionParser
|
||||
*/
|
||||
public final class ExpressionRoleMapping {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<ExpressionRoleMapping, String> PARSER = new ConstructingObjectParser<>("role-mapping", true,
|
||||
(args, name) -> new ExpressionRoleMapping(name, (RoleMapperExpression) args[0], (List<String>) args[1],
|
||||
(Map<String, Object>) args[2], (boolean) args[3]));
|
||||
|
||||
static {
|
||||
PARSER.declareField(constructorArg(), (parser, context) -> RoleMapperExpressionParser.fromXContent(parser), Fields.RULES,
|
||||
ObjectParser.ValueType.OBJECT);
|
||||
PARSER.declareStringArray(constructorArg(), Fields.ROLES);
|
||||
PARSER.declareField(constructorArg(), XContentParser::map, Fields.METADATA, ObjectParser.ValueType.OBJECT);
|
||||
PARSER.declareBoolean(constructorArg(), Fields.ENABLED);
|
||||
}
|
||||
|
||||
private final String name;
|
||||
private final RoleMapperExpression expression;
|
||||
private final List<String> roles;
|
||||
private final Map<String, Object> metadata;
|
||||
private final boolean enabled;
|
||||
|
||||
/**
|
||||
* Constructor for role mapping
|
||||
*
|
||||
* @param name role mapping name
|
||||
* @param expr {@link RoleMapperExpression} Expression used for role mapping
|
||||
* @param roles list of roles to be associated with the user
|
||||
* @param metadata metadata that helps to identify which roles are assigned
|
||||
* to the user
|
||||
* @param enabled a flag when {@code true} signifies the role mapping is active
|
||||
*/
|
||||
public ExpressionRoleMapping(final String name, final RoleMapperExpression expr, final List<String> roles,
|
||||
final Map<String, Object> metadata, boolean enabled) {
|
||||
this.name = name;
|
||||
this.expression = expr;
|
||||
this.roles = Collections.unmodifiableList(roles);
|
||||
this.metadata = (metadata == null) ? Collections.emptyMap() : Collections.unmodifiableMap(metadata);
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public RoleMapperExpression getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
public List<String> getRoles() {
|
||||
return roles;
|
||||
}
|
||||
|
||||
public Map<String, Object> getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + (enabled ? 1231 : 1237);
|
||||
result = prime * result + ((expression == null) ? 0 : expression.hashCode());
|
||||
result = prime * result + ((metadata == null) ? 0 : metadata.hashCode());
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
result = prime * result + ((roles == null) ? 0 : roles.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
final ExpressionRoleMapping other = (ExpressionRoleMapping) obj;
|
||||
if (enabled != other.enabled)
|
||||
return false;
|
||||
if (expression == null) {
|
||||
if (other.expression != null)
|
||||
return false;
|
||||
} else if (!expression.equals(other.expression))
|
||||
return false;
|
||||
if (metadata == null) {
|
||||
if (other.metadata != null)
|
||||
return false;
|
||||
} else if (!metadata.equals(other.metadata))
|
||||
return false;
|
||||
if (name == null) {
|
||||
if (other.name != null)
|
||||
return false;
|
||||
} else if (!name.equals(other.name))
|
||||
return false;
|
||||
if (roles == null) {
|
||||
if (other.roles != null)
|
||||
return false;
|
||||
} else if (!roles.equals(other.roles))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
public interface Fields {
|
||||
ParseField ROLES = new ParseField("roles");
|
||||
ParseField ENABLED = new ParseField("enabled");
|
||||
ParseField RULES = new ParseField("rules");
|
||||
ParseField METADATA = new ParseField("metadata");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Request object to get role mappings
|
||||
*/
|
||||
public final class GetRoleMappingsRequest implements Validatable {
|
||||
private final Set<String> roleMappingNames;
|
||||
|
||||
public GetRoleMappingsRequest(final String... roleMappingNames) {
|
||||
if (roleMappingNames != null) {
|
||||
this.roleMappingNames = Collections.unmodifiableSet(Sets.newHashSet(roleMappingNames));
|
||||
} else {
|
||||
this.roleMappingNames = Collections.emptySet();
|
||||
}
|
||||
}
|
||||
|
||||
public Set<String> getRoleMappingNames() {
|
||||
return roleMappingNames;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(roleMappingNames);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final GetRoleMappingsRequest other = (GetRoleMappingsRequest) obj;
|
||||
|
||||
return Objects.equals(roleMappingNames, other.roleMappingNames);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParserUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Get role mappings response
|
||||
*/
|
||||
public final class GetRoleMappingsResponse {
|
||||
|
||||
private final List<ExpressionRoleMapping> mappings;
|
||||
|
||||
public GetRoleMappingsResponse(List<ExpressionRoleMapping> mappings) {
|
||||
this.mappings = Collections.unmodifiableList(mappings);
|
||||
}
|
||||
|
||||
public List<ExpressionRoleMapping> getMappings() {
|
||||
return mappings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
final GetRoleMappingsResponse that = (GetRoleMappingsResponse) o;
|
||||
return this.mappings.equals(that.mappings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return mappings.hashCode();
|
||||
}
|
||||
|
||||
public static GetRoleMappingsResponse fromXContent(XContentParser parser) throws IOException {
|
||||
final List<ExpressionRoleMapping> roleMappings = new ArrayList<>();
|
||||
|
||||
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
|
||||
roleMappings.add(ExpressionRoleMapping.PARSER.parse(parser, parser.currentName()));
|
||||
}
|
||||
|
||||
return new GetRoleMappingsResponse(roleMappings);
|
||||
}
|
||||
}
|
|
@ -62,7 +62,6 @@ public final class PutUserResponse {
|
|||
|
||||
static {
|
||||
PARSER.declareBoolean(constructorArg(), new ParseField("created"));
|
||||
PARSER.declareObject((a,b) -> {}, (parser, context) -> null, new ParseField("user")); // ignore the user field!
|
||||
}
|
||||
|
||||
public static PutUserResponse fromXContent(XContentParser parser) throws IOException {
|
||||
|
|
|
@ -45,6 +45,18 @@ import java.util.List;
|
|||
public final class RoleMapperExpressionParser {
|
||||
public static final ParseField FIELD = new ParseField("field");
|
||||
|
||||
public static RoleMapperExpression fromXContent(final XContentParser parser) throws IOException {
|
||||
return new RoleMapperExpressionParser().parse("rules", parser);
|
||||
}
|
||||
|
||||
/**
|
||||
* This function exists to be compatible with
|
||||
* {@link org.elasticsearch.common.xcontent.ContextParser#parse(XContentParser, Object)}
|
||||
*/
|
||||
public static RoleMapperExpression parseObject(XContentParser parser, String id) throws IOException {
|
||||
return new RoleMapperExpressionParser().parse(id, parser);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param name The name of the expression tree within its containing object.
|
||||
* Used to provide descriptive error messages.
|
||||
|
|
|
@ -21,7 +21,6 @@ package org.elasticsearch.client.watcher;
|
|||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
|
@ -27,7 +27,6 @@ public class DeactivateWatchRequest implements Validatable {
|
|||
private final String watchId;
|
||||
|
||||
public DeactivateWatchRequest(String watchId) {
|
||||
|
||||
Objects.requireNonNull(watchId, "watch id is missing");
|
||||
if (PutWatchRequest.isValidId(watchId) == false) {
|
||||
throw new IllegalArgumentException("watch id contains whitespace");
|
||||
|
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.client.Validatable;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A delete watch request to delete an watch by name (id)
|
||||
*/
|
||||
public class DeleteWatchRequest implements Validatable {
|
||||
|
||||
private final String id;
|
||||
|
||||
public DeleteWatchRequest(String id) {
|
||||
Objects.requireNonNull(id, "watch id is missing");
|
||||
if (PutWatchRequest.isValidId(id) == false) {
|
||||
throw new IllegalArgumentException("watch id contains whitespace");
|
||||
}
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The name of the watch to be deleted
|
||||
*/
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "delete [" + id + "]";
|
||||
}
|
||||
}
|
|
@ -16,12 +16,9 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -30,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DeleteWatchResponse extends ActionResponse implements ToXContentObject {
|
||||
public class DeleteWatchResponse implements ToXContentObject {
|
||||
|
||||
private static final ObjectParser<DeleteWatchResponse, Void> PARSER
|
||||
= new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new);
|
||||
|
@ -92,22 +89,6 @@ public class DeleteWatchResponse extends ActionResponse implements ToXContentObj
|
|||
return Objects.hash(id, version, found);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
id = in.readString();
|
||||
version = in.readVLong();
|
||||
found = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(id);
|
||||
out.writeVLong(version);
|
||||
out.writeBoolean(found);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder.startObject()
|
|
@ -16,67 +16,43 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ValidateActions;
|
||||
import org.elasticsearch.client.Validatable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.uid.Versions;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* This request class contains the data needed to create a watch along with the name of the watch.
|
||||
* The name of the watch will become the ID of the indexed document.
|
||||
*/
|
||||
public final class PutWatchRequest extends ActionRequest {
|
||||
public final class PutWatchRequest implements Validatable {
|
||||
|
||||
private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+");
|
||||
|
||||
private String id;
|
||||
private BytesReference source;
|
||||
private XContentType xContentType = XContentType.JSON;
|
||||
private final String id;
|
||||
private final BytesReference source;
|
||||
private final XContentType xContentType;
|
||||
private boolean active = true;
|
||||
private long version = Versions.MATCH_ANY;
|
||||
|
||||
public PutWatchRequest() {}
|
||||
|
||||
public PutWatchRequest(StreamInput in) throws IOException {
|
||||
readFrom(in);
|
||||
}
|
||||
|
||||
public PutWatchRequest(String id, BytesReference source, XContentType xContentType) {
|
||||
Objects.requireNonNull(id, "watch id is missing");
|
||||
if (isValidId(id) == false) {
|
||||
throw new IllegalArgumentException("watch id contains whitespace");
|
||||
}
|
||||
Objects.requireNonNull(source, "watch source is missing");
|
||||
Objects.requireNonNull(xContentType, "request body is missing");
|
||||
this.id = id;
|
||||
this.source = source;
|
||||
this.xContentType = xContentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
id = in.readString();
|
||||
source = in.readBytesReference();
|
||||
active = in.readBoolean();
|
||||
xContentType = in.readEnum(XContentType.class);
|
||||
version = in.readZLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(id);
|
||||
out.writeBytesReference(source);
|
||||
out.writeBoolean(active);
|
||||
out.writeEnum(xContentType);
|
||||
out.writeZLong(version);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The name that will be the ID of the indexed document
|
||||
*/
|
||||
|
@ -84,13 +60,6 @@ public final class PutWatchRequest extends ActionRequest {
|
|||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the watch name
|
||||
*/
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The source of the watch
|
||||
*/
|
||||
|
@ -98,14 +67,6 @@ public final class PutWatchRequest extends ActionRequest {
|
|||
return source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the source of the watch
|
||||
*/
|
||||
public void setSource(BytesReference source, XContentType xContentType) {
|
||||
this.source = source;
|
||||
this.xContentType = xContentType;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The initial active state of the watch (defaults to {@code true}, e.g. "active")
|
||||
*/
|
||||
|
@ -135,23 +96,6 @@ public final class PutWatchRequest extends ActionRequest {
|
|||
this.version = version;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = null;
|
||||
if (id == null) {
|
||||
validationException = ValidateActions.addValidationError("watch id is missing", validationException);
|
||||
} else if (isValidId(id) == false) {
|
||||
validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException);
|
||||
}
|
||||
if (source == null) {
|
||||
validationException = ValidateActions.addValidationError("watch source is missing", validationException);
|
||||
}
|
||||
if (xContentType == null) {
|
||||
validationException = ValidateActions.addValidationError("request body is missing", validationException);
|
||||
}
|
||||
return validationException;
|
||||
}
|
||||
|
||||
public static boolean isValidId(String id) {
|
||||
return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches();
|
||||
}
|
|
@ -16,12 +16,9 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -30,14 +27,15 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class PutWatchResponse extends ActionResponse implements ToXContentObject {
|
||||
public class PutWatchResponse implements ToXContentObject {
|
||||
|
||||
private static final ObjectParser<PutWatchResponse, Void> PARSER
|
||||
= new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new);
|
||||
|
||||
static {
|
||||
PARSER.declareString(PutWatchResponse::setId, new ParseField("_id"));
|
||||
PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version"));
|
||||
PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created"));
|
||||
PARSER.declareString(PutWatchResponse::setId, new ParseField("_id"));
|
||||
PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version"));
|
||||
PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created"));
|
||||
}
|
||||
|
||||
private String id;
|
||||
|
@ -92,22 +90,6 @@ public class PutWatchResponse extends ActionResponse implements ToXContentObject
|
|||
return Objects.hash(id, version, created);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(id);
|
||||
out.writeVLong(version);
|
||||
out.writeBoolean(created);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
id = in.readString();
|
||||
version = in.readVLong();
|
||||
created = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder.startObject()
|
|
@ -21,4 +21,4 @@
|
|||
* Request and Response objects for the default distribution's Watcher
|
||||
* APIs.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
|
@ -24,17 +24,16 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.protocol.xpack.graph.Hop;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.client.graph.Hop;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Assert;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class GrapRequestConvertersTests extends ESTestCase{
|
||||
public class GrapRequestConvertersTests extends ESTestCase {
|
||||
|
||||
public void testGraphExplore() throws Exception {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
|
@ -43,14 +42,14 @@ public class GrapRequestConvertersTests extends ESTestCase{
|
|||
graphExploreRequest.sampleDiversityField("diversity");
|
||||
graphExploreRequest.indices("index1", "index2");
|
||||
graphExploreRequest.types("type1", "type2");
|
||||
int timeout = ESTestCase.randomIntBetween(10000, 20000);
|
||||
int timeout = randomIntBetween(10000, 20000);
|
||||
graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout));
|
||||
graphExploreRequest.useSignificance(ESTestCase.randomBoolean());
|
||||
int numHops = ESTestCase.randomIntBetween(1, 5);
|
||||
graphExploreRequest.useSignificance(randomBoolean());
|
||||
int numHops = randomIntBetween(1, 5);
|
||||
for (int i = 0; i < numHops; i++) {
|
||||
int hopNumber = i + 1;
|
||||
QueryBuilder guidingQuery = null;
|
||||
if (ESTestCase.randomBoolean()) {
|
||||
if (randomBoolean()) {
|
||||
guidingQuery = new TermQueryBuilder("field" + hopNumber, "value" + hopNumber);
|
||||
}
|
||||
Hop hop = graphExploreRequest.createNextHop(guidingQuery);
|
||||
|
@ -58,10 +57,10 @@ public class GrapRequestConvertersTests extends ESTestCase{
|
|||
hop.getVertexRequest(0).addInclude("value" + hopNumber, hopNumber);
|
||||
}
|
||||
Request request = GraphRequestConverters.explore(graphExploreRequest);
|
||||
Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
Assert.assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint());
|
||||
Assert.assertEquals(expectedParams, request.getParameters());
|
||||
Assert.assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint());
|
||||
assertEquals(expectedParams, request.getParameters());
|
||||
assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
|
||||
RequestConvertersTests.assertToXContentBody(graphExploreRequest, request.getEntity());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,11 +23,11 @@ import org.apache.http.client.methods.HttpPut;
|
|||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
|
||||
import org.elasticsearch.protocol.xpack.graph.Hop;
|
||||
import org.elasticsearch.protocol.xpack.graph.Vertex;
|
||||
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.client.graph.GraphExploreResponse;
|
||||
import org.elasticsearch.client.graph.Hop;
|
||||
import org.elasticsearch.client.graph.Vertex;
|
||||
import org.elasticsearch.client.graph.VertexRequest;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -136,4 +136,4 @@ public class GraphIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -64,12 +64,14 @@ import org.elasticsearch.client.ml.StartDatafeedRequest;
|
|||
import org.elasticsearch.client.ml.StartDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.StopDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.StopDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.calendars.CalendarTests;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedState;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
|
||||
import org.elasticsearch.client.ml.job.config.DataDescription;
|
||||
import org.elasticsearch.client.ml.job.config.Detector;
|
||||
|
@ -357,6 +359,33 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(createdDatafeed.getIndices(), equalTo(datafeedConfig.getIndices()));
|
||||
}
|
||||
|
||||
public void testUpdateDatafeed() throws Exception {
|
||||
String jobId = randomValidJobId();
|
||||
Job job = buildJob(jobId);
|
||||
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
|
||||
execute(new PutJobRequest(job), machineLearningClient::putJob, machineLearningClient::putJobAsync);
|
||||
|
||||
String datafeedId = "datafeed-" + jobId;
|
||||
DatafeedConfig datafeedConfig = DatafeedConfig.builder(datafeedId, jobId).setIndices("some_data_index").build();
|
||||
|
||||
PutDatafeedResponse response = machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeedConfig), RequestOptions.DEFAULT);
|
||||
|
||||
DatafeedConfig createdDatafeed = response.getResponse();
|
||||
assertThat(createdDatafeed.getId(), equalTo(datafeedId));
|
||||
assertThat(createdDatafeed.getIndices(), equalTo(datafeedConfig.getIndices()));
|
||||
|
||||
DatafeedUpdate datafeedUpdate = DatafeedUpdate.builder(datafeedId).setIndices("some_other_data_index").setScrollSize(10).build();
|
||||
|
||||
response = execute(new UpdateDatafeedRequest(datafeedUpdate),
|
||||
machineLearningClient::updateDatafeed,
|
||||
machineLearningClient::updateDatafeedAsync);
|
||||
|
||||
DatafeedConfig updatedDatafeed = response.getResponse();
|
||||
assertThat(datafeedUpdate.getId(), equalTo(updatedDatafeed.getId()));
|
||||
assertThat(datafeedUpdate.getIndices(), equalTo(updatedDatafeed.getIndices()));
|
||||
assertThat(datafeedUpdate.getScrollSize(), equalTo(updatedDatafeed.getScrollSize()));
|
||||
}
|
||||
|
||||
public void testGetDatafeed() throws Exception {
|
||||
String jobId1 = "test-get-datafeed-job-1";
|
||||
String jobId2 = "test-get-datafeed-job-2";
|
||||
|
|
|
@ -20,8 +20,8 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
@ -28,7 +28,7 @@ import java.util.Map;
|
|||
|
||||
public class MigrationRequestConvertersTests extends ESTestCase {
|
||||
|
||||
public static void testGetMigrationAssistance() {
|
||||
public void testGetMigrationAssistance() {
|
||||
IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest();
|
||||
String expectedEndpoint = "/_xpack/migration/assistance";
|
||||
if (randomBoolean()) {
|
||||
|
|
|
@ -1514,13 +1514,13 @@ public class RequestConvertersTests extends ESTestCase {
|
|||
String[] includes = new String[numIncludes];
|
||||
String includesParam = randomFields(includes);
|
||||
if (numIncludes > 0) {
|
||||
expectedParams.put("_source_include", includesParam);
|
||||
expectedParams.put("_source_includes", includesParam);
|
||||
}
|
||||
int numExcludes = randomIntBetween(0, 5);
|
||||
String[] excludes = new String[numExcludes];
|
||||
String excludesParam = randomFields(excludes);
|
||||
if (numExcludes > 0) {
|
||||
expectedParams.put("_source_exclude", excludesParam);
|
||||
expectedParams.put("_source_excludes", excludesParam);
|
||||
}
|
||||
consumer.accept(new FetchSourceContext(true, includes, excludes));
|
||||
}
|
||||
|
|
|
@ -39,6 +39,8 @@ import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
|
|||
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.RollableIndexCaps;
|
||||
import org.elasticsearch.client.rollup.RollupJobCaps;
|
||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||
|
@ -150,7 +152,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||
PutRollupJobRequest putRollupJobRequest =
|
||||
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
|
||||
final RollupClient rollupClient = highLevelClient().rollup();
|
||||
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(id);
|
||||
DeleteRollupJobResponse deleteRollupJobResponse = highLevelClient().rollup()
|
||||
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT);
|
||||
|
@ -164,8 +166,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||
assertThat(responseException.status().getStatus(), is(404));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testPutAndGetRollupJob() throws Exception {
|
||||
public void testPutStartAndGetRollupJob() throws Exception {
|
||||
// TODO expand this to also test with histogram and terms?
|
||||
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
|
||||
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
|
||||
|
@ -178,9 +179,9 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
|
||||
assertTrue(response.isAcknowledged());
|
||||
|
||||
// TODO Replace this with the Rollup Start Job API
|
||||
Response startResponse = client().performRequest(new Request("POST", "/_xpack/rollup/job/" + id + "/_start"));
|
||||
assertEquals(RestStatus.OK.getStatus(), startResponse.getHttpResponse().getStatusLine().getStatusCode());
|
||||
StartRollupJobRequest startRequest = new StartRollupJobRequest(id);
|
||||
StartRollupJobResponse startResponse = execute(startRequest, rollupClient::startRollupJob, rollupClient::startRollupJobAsync);
|
||||
assertTrue(startResponse.isAcknowledged());
|
||||
|
||||
assertBusy(() -> {
|
||||
SearchResponse searchResponse = highLevelClient().search(new SearchRequest(rollupIndex), RequestOptions.DEFAULT);
|
||||
|
|
|
@ -20,17 +20,19 @@
|
|||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class RollupRequestConvertersTests extends ESTestCase {
|
||||
|
@ -47,6 +49,18 @@ public class RollupRequestConvertersTests extends ESTestCase {
|
|||
RequestConvertersTests.assertToXContentBody(put, request.getEntity());
|
||||
}
|
||||
|
||||
public void testStartJob() throws IOException {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
|
||||
StartRollupJobRequest startJob = new StartRollupJobRequest(jobId);
|
||||
|
||||
Request request = RollupRequestConverters.startJob(startJob);
|
||||
assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + jobId + "/_start"));
|
||||
assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(request.getParameters().keySet(), empty());
|
||||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
|
||||
public void testGetJob() {
|
||||
boolean getAll = randomBoolean();
|
||||
String job = getAll ? "_all" : RequestConvertersTests.randomIndicesNames(1, 1)[0];
|
||||
|
|
|
@ -19,13 +19,16 @@
|
|||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.security.CreateTokenRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleRequest;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsRequest;
|
||||
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||
import org.elasticsearch.client.security.PutRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
|
@ -33,6 +36,7 @@ import org.elasticsearch.client.security.RefreshPolicy;
|
|||
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
|
||||
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
|
||||
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -102,6 +106,25 @@ public class SecurityRequestConvertersTests extends ESTestCase {
|
|||
assertToXContentBody(putRoleMappingRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testGetRoleMappings() throws IOException {
|
||||
int noOfRoleMappingNames = randomIntBetween(0, 2);
|
||||
final String[] roleMappingNames =
|
||||
randomArray(noOfRoleMappingNames, noOfRoleMappingNames, String[]::new, () -> randomAlphaOfLength(5));
|
||||
final GetRoleMappingsRequest getRoleMappingsRequest = new GetRoleMappingsRequest(roleMappingNames);
|
||||
|
||||
final Request request = SecurityRequestConverters.getRoleMappings(getRoleMappingsRequest);
|
||||
|
||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
if (noOfRoleMappingNames == 0) {
|
||||
assertEquals("/_xpack/security/role_mapping", request.getEndpoint());
|
||||
} else {
|
||||
assertEquals("/_xpack/security/role_mapping/" +
|
||||
Strings.collectionToCommaDelimitedString(getRoleMappingsRequest.getRoleMappingNames()), request.getEndpoint());
|
||||
}
|
||||
assertEquals(Collections.emptyMap(), request.getParameters());
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testEnableUser() {
|
||||
final String username = randomAlphaOfLengthBetween(1, 12);
|
||||
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
|
||||
|
@ -189,4 +212,34 @@ public class SecurityRequestConvertersTests extends ESTestCase {
|
|||
assertEquals(expectedParams, request.getParameters());
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
|
||||
public void testCreateTokenWithPasswordGrant() throws Exception {
|
||||
final String username = randomAlphaOfLengthBetween(1, 12);
|
||||
final String password = randomAlphaOfLengthBetween(8, 12);
|
||||
CreateTokenRequest createTokenRequest = CreateTokenRequest.passwordGrant(username, password.toCharArray());
|
||||
Request request = SecurityRequestConverters.createToken(createTokenRequest);
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/security/oauth2/token", request.getEndpoint());
|
||||
assertEquals(0, request.getParameters().size());
|
||||
assertToXContentBody(createTokenRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testCreateTokenWithRefreshTokenGrant() throws Exception {
|
||||
final String refreshToken = randomAlphaOfLengthBetween(8, 24);
|
||||
CreateTokenRequest createTokenRequest = CreateTokenRequest.refreshTokenGrant(refreshToken);
|
||||
Request request = SecurityRequestConverters.createToken(createTokenRequest);
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/security/oauth2/token", request.getEndpoint());
|
||||
assertEquals(0, request.getParameters().size());
|
||||
assertToXContentBody(createTokenRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testCreateTokenWithClientCredentialsGrant() throws Exception {
|
||||
CreateTokenRequest createTokenRequest = CreateTokenRequest.clientCredentialsGrant();
|
||||
Request request = SecurityRequestConverters.createToken(createTokenRequest);
|
||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/security/oauth2/token", request.getEndpoint());
|
||||
assertEquals(0, request.getParameters().size());
|
||||
assertToXContentBody(createTokenRequest, request.getEntity());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,10 +35,10 @@ import org.elasticsearch.client.watcher.StopWatchServiceRequest;
|
|||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
|
|
@ -29,8 +29,8 @@ import org.elasticsearch.client.watcher.StartWatchServiceRequest;
|
|||
import org.elasticsearch.client.watcher.StopWatchServiceRequest;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
|
@ -56,11 +56,9 @@ public class WatcherRequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testPutWatch() throws Exception {
|
||||
PutWatchRequest putWatchRequest = new PutWatchRequest();
|
||||
String watchId = randomAlphaOfLength(10);
|
||||
putWatchRequest.setId(watchId);
|
||||
String body = randomAlphaOfLength(20);
|
||||
putWatchRequest.setSource(new BytesArray(body), XContentType.JSON);
|
||||
PutWatchRequest putWatchRequest = new PutWatchRequest(watchId, new BytesArray(body), XContentType.JSON);
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
if (randomBoolean()) {
|
||||
|
@ -94,9 +92,8 @@ public class WatcherRequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testDeleteWatch() {
|
||||
DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest();
|
||||
String watchId = randomAlphaOfLength(10);
|
||||
deleteWatchRequest.setId(watchId);
|
||||
DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(watchId);
|
||||
|
||||
Request request = WatcherRequestConverters.deleteWatch(deleteWatchRequest);
|
||||
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
|
||||
|
|
|
@ -708,14 +708,15 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
for (BulkItemResponse bulkItemResponse : bulkResponse) { // <1>
|
||||
DocWriteResponse itemResponse = bulkItemResponse.getResponse(); // <2>
|
||||
|
||||
if (bulkItemResponse.getOpType() == DocWriteRequest.OpType.INDEX
|
||||
|| bulkItemResponse.getOpType() == DocWriteRequest.OpType.CREATE) { // <3>
|
||||
switch (bulkItemResponse.getOpType()) {
|
||||
case INDEX: // <3>
|
||||
case CREATE:
|
||||
IndexResponse indexResponse = (IndexResponse) itemResponse;
|
||||
|
||||
} else if (bulkItemResponse.getOpType() == DocWriteRequest.OpType.UPDATE) { // <4>
|
||||
break;
|
||||
case UPDATE: // <4>
|
||||
UpdateResponse updateResponse = (UpdateResponse) itemResponse;
|
||||
|
||||
} else if (bulkItemResponse.getOpType() == DocWriteRequest.OpType.DELETE) { // <5>
|
||||
break;
|
||||
case DELETE: // <5>
|
||||
DeleteResponse deleteResponse = (DeleteResponse) itemResponse;
|
||||
}
|
||||
}
|
||||
|
@ -728,8 +729,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// tag::bulk-errors
|
||||
for (BulkItemResponse bulkItemResponse : bulkResponse) {
|
||||
if (bulkItemResponse.isFailed()) { // <1>
|
||||
BulkItemResponse.Failure failure = bulkItemResponse.getFailure(); // <2>
|
||||
|
||||
BulkItemResponse.Failure failure =
|
||||
bulkItemResponse.getFailure(); // <2>
|
||||
}
|
||||
}
|
||||
// end::bulk-errors
|
||||
|
@ -839,8 +840,10 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// tag::reindex-request-remote
|
||||
request.setRemoteInfo(
|
||||
new RemoteInfo(
|
||||
"https", "localhost", 9002, null, new BytesArray(new MatchAllQueryBuilder().toString()),
|
||||
"user", "pass", Collections.emptyMap(), new TimeValue(100, TimeUnit.MILLISECONDS),
|
||||
"https", "localhost", 9002, null,
|
||||
new BytesArray(new MatchAllQueryBuilder().toString()),
|
||||
"user", "pass", Collections.emptyMap(),
|
||||
new TimeValue(100, TimeUnit.MILLISECONDS),
|
||||
new TimeValue(100, TimeUnit.SECONDS)
|
||||
)
|
||||
); // <1>
|
||||
|
@ -861,7 +864,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
|
||||
// tag::reindex-execute
|
||||
BulkByScrollResponse bulkResponse = client.reindex(request, RequestOptions.DEFAULT);
|
||||
BulkByScrollResponse bulkResponse =
|
||||
client.reindex(request, RequestOptions.DEFAULT);
|
||||
// end::reindex-execute
|
||||
assertSame(0, bulkResponse.getSearchFailures().size());
|
||||
assertSame(0, bulkResponse.getBulkFailures().size());
|
||||
|
@ -878,9 +882,12 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
long bulkRetries = bulkResponse.getBulkRetries(); // <10>
|
||||
long searchRetries = bulkResponse.getSearchRetries(); // <11>
|
||||
TimeValue throttledMillis = bulkResponse.getStatus().getThrottled(); // <12>
|
||||
TimeValue throttledUntilMillis = bulkResponse.getStatus().getThrottledUntil(); // <13>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures = bulkResponse.getSearchFailures(); // <14>
|
||||
List<BulkItemResponse.Failure> bulkFailures = bulkResponse.getBulkFailures(); // <15>
|
||||
TimeValue throttledUntilMillis =
|
||||
bulkResponse.getStatus().getThrottledUntil(); // <13>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures =
|
||||
bulkResponse.getSearchFailures(); // <14>
|
||||
List<BulkItemResponse.Failure> bulkFailures =
|
||||
bulkResponse.getBulkFailures(); // <15>
|
||||
// end::reindex-response
|
||||
}
|
||||
{
|
||||
|
@ -888,8 +895,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
request.setSourceIndices("source1");
|
||||
request.setDestIndex("dest");
|
||||
|
||||
ActionListener<BulkByScrollResponse> listener;
|
||||
// tag::reindex-execute-listener
|
||||
ActionListener<BulkByScrollResponse> listener = new ActionListener<BulkByScrollResponse>() {
|
||||
listener = new ActionListener<BulkByScrollResponse>() {
|
||||
@Override
|
||||
public void onResponse(BulkByScrollResponse bulkResponse) {
|
||||
// <1>
|
||||
|
@ -939,8 +947,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::rethrottle-request-execution
|
||||
}
|
||||
|
||||
ActionListener<ListTasksResponse> listener;
|
||||
// tag::rethrottle-request-async-listener
|
||||
ActionListener<ListTasksResponse> listener = new ActionListener<ListTasksResponse>() {
|
||||
listener = new ActionListener<ListTasksResponse>() {
|
||||
@Override
|
||||
public void onResponse(ListTasksResponse response) {
|
||||
// <1>
|
||||
|
@ -959,9 +968,12 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
RethrottleRequest request = new RethrottleRequest(taskId);
|
||||
// tag::rethrottle-execute-async
|
||||
client.reindexRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
client.updateByQueryRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <2>
|
||||
client.deleteByQueryRethrottleAsync(request, RequestOptions.DEFAULT, listener); // <3>
|
||||
client.reindexRethrottleAsync(request,
|
||||
RequestOptions.DEFAULT, listener); // <1>
|
||||
client.updateByQueryRethrottleAsync(request,
|
||||
RequestOptions.DEFAULT, listener); // <2>
|
||||
client.deleteByQueryRethrottleAsync(request,
|
||||
RequestOptions.DEFAULT, listener); // <3>
|
||||
// end::rethrottle-execute-async
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
@ -990,7 +1002,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
{
|
||||
// tag::update-by-query-request
|
||||
UpdateByQueryRequest request = new UpdateByQueryRequest("source1", "source2"); // <1>
|
||||
UpdateByQueryRequest request =
|
||||
new UpdateByQueryRequest("source1", "source2"); // <1>
|
||||
// end::update-by-query-request
|
||||
// tag::update-by-query-request-conflicts
|
||||
request.setConflicts("proceed"); // <1>
|
||||
|
@ -1034,7 +1047,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::update-by-query-request-indicesOptions
|
||||
|
||||
// tag::update-by-query-execute
|
||||
BulkByScrollResponse bulkResponse = client.updateByQuery(request, RequestOptions.DEFAULT);
|
||||
BulkByScrollResponse bulkResponse =
|
||||
client.updateByQuery(request, RequestOptions.DEFAULT);
|
||||
// end::update-by-query-execute
|
||||
assertSame(0, bulkResponse.getSearchFailures().size());
|
||||
assertSame(0, bulkResponse.getBulkFailures().size());
|
||||
|
@ -1050,17 +1064,21 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
long bulkRetries = bulkResponse.getBulkRetries(); // <9>
|
||||
long searchRetries = bulkResponse.getSearchRetries(); // <10>
|
||||
TimeValue throttledMillis = bulkResponse.getStatus().getThrottled(); // <11>
|
||||
TimeValue throttledUntilMillis = bulkResponse.getStatus().getThrottledUntil(); // <12>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures = bulkResponse.getSearchFailures(); // <13>
|
||||
List<BulkItemResponse.Failure> bulkFailures = bulkResponse.getBulkFailures(); // <14>
|
||||
TimeValue throttledUntilMillis =
|
||||
bulkResponse.getStatus().getThrottledUntil(); // <12>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures =
|
||||
bulkResponse.getSearchFailures(); // <13>
|
||||
List<BulkItemResponse.Failure> bulkFailures =
|
||||
bulkResponse.getBulkFailures(); // <14>
|
||||
// end::update-by-query-response
|
||||
}
|
||||
{
|
||||
UpdateByQueryRequest request = new UpdateByQueryRequest();
|
||||
request.indices("source1");
|
||||
|
||||
ActionListener<BulkByScrollResponse> listener;
|
||||
// tag::update-by-query-execute-listener
|
||||
ActionListener<BulkByScrollResponse> listener = new ActionListener<BulkByScrollResponse>() {
|
||||
listener = new ActionListener<BulkByScrollResponse>() {
|
||||
@Override
|
||||
public void onResponse(BulkByScrollResponse bulkResponse) {
|
||||
// <1>
|
||||
|
@ -1108,7 +1126,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
{
|
||||
// tag::delete-by-query-request
|
||||
DeleteByQueryRequest request = new DeleteByQueryRequest("source1", "source2"); // <1>
|
||||
DeleteByQueryRequest request =
|
||||
new DeleteByQueryRequest("source1", "source2"); // <1>
|
||||
// end::delete-by-query-request
|
||||
// tag::delete-by-query-request-conflicts
|
||||
request.setConflicts("proceed"); // <1>
|
||||
|
@ -1142,7 +1161,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// end::delete-by-query-request-indicesOptions
|
||||
|
||||
// tag::delete-by-query-execute
|
||||
BulkByScrollResponse bulkResponse = client.deleteByQuery(request, RequestOptions.DEFAULT);
|
||||
BulkByScrollResponse bulkResponse =
|
||||
client.deleteByQuery(request, RequestOptions.DEFAULT);
|
||||
// end::delete-by-query-execute
|
||||
assertSame(0, bulkResponse.getSearchFailures().size());
|
||||
assertSame(0, bulkResponse.getBulkFailures().size());
|
||||
|
@ -1157,17 +1177,21 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
long bulkRetries = bulkResponse.getBulkRetries(); // <8>
|
||||
long searchRetries = bulkResponse.getSearchRetries(); // <9>
|
||||
TimeValue throttledMillis = bulkResponse.getStatus().getThrottled(); // <10>
|
||||
TimeValue throttledUntilMillis = bulkResponse.getStatus().getThrottledUntil(); // <11>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures = bulkResponse.getSearchFailures(); // <12>
|
||||
List<BulkItemResponse.Failure> bulkFailures = bulkResponse.getBulkFailures(); // <13>
|
||||
TimeValue throttledUntilMillis =
|
||||
bulkResponse.getStatus().getThrottledUntil(); // <11>
|
||||
List<ScrollableHitSource.SearchFailure> searchFailures =
|
||||
bulkResponse.getSearchFailures(); // <12>
|
||||
List<BulkItemResponse.Failure> bulkFailures =
|
||||
bulkResponse.getBulkFailures(); // <13>
|
||||
// end::delete-by-query-response
|
||||
}
|
||||
{
|
||||
DeleteByQueryRequest request = new DeleteByQueryRequest();
|
||||
request.indices("source1");
|
||||
|
||||
ActionListener<BulkByScrollResponse> listener;
|
||||
// tag::delete-by-query-execute-listener
|
||||
ActionListener<BulkByScrollResponse> listener = new ActionListener<BulkByScrollResponse>() {
|
||||
listener = new ActionListener<BulkByScrollResponse>() {
|
||||
@Override
|
||||
public void onResponse(BulkByScrollResponse bulkResponse) {
|
||||
// <1>
|
||||
|
@ -1430,14 +1454,16 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
|
||||
public void afterBulk(long executionId, BulkRequest request,
|
||||
Throwable failure) {
|
||||
// <4>
|
||||
}
|
||||
};
|
||||
|
||||
BulkProcessor bulkProcessor = BulkProcessor.builder(
|
||||
(request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
|
||||
listener).build(); // <5>
|
||||
(request, bulkListener) ->
|
||||
client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
|
||||
listener).build(); // <5>
|
||||
// end::bulk-processor-init
|
||||
assertNotNull(bulkProcessor);
|
||||
|
||||
|
@ -1488,7 +1514,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
|
||||
public void afterBulk(long executionId, BulkRequest request,
|
||||
Throwable failure) {
|
||||
logger.error("Failed to execute bulk", failure); // <3>
|
||||
}
|
||||
};
|
||||
|
@ -1496,7 +1523,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
// tag::bulk-processor-options
|
||||
BulkProcessor.Builder builder = BulkProcessor.builder(
|
||||
(request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener), listener);
|
||||
(request, bulkListener) ->
|
||||
client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
|
||||
listener);
|
||||
builder.setBulkActions(500); // <1>
|
||||
builder.setBulkSize(new ByteSizeValue(1L, ByteSizeUnit.MB)); // <2>
|
||||
builder.setConcurrentRequests(0); // <3>
|
||||
|
@ -1563,7 +1592,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
request.setFields("user");
|
||||
|
||||
// tag::term-vectors-execute
|
||||
TermVectorsResponse response = client.termvectors(request, RequestOptions.DEFAULT);
|
||||
TermVectorsResponse response =
|
||||
client.termvectors(request, RequestOptions.DEFAULT);
|
||||
// end::term-vectors-execute
|
||||
|
||||
|
||||
|
@ -1574,16 +1604,17 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
boolean found = response.getFound(); // <4>
|
||||
// end::term-vectors-response
|
||||
|
||||
// tag::term-vectors-term-vectors
|
||||
if (response.getTermVectorsList() != null) {
|
||||
List<TermVectorsResponse.TermVector> tvList = response.getTermVectorsList();
|
||||
for (TermVectorsResponse.TermVector tv : tvList) {
|
||||
// tag::term-vectors-term-vectors
|
||||
for (TermVectorsResponse.TermVector tv : response.getTermVectorsList()) {
|
||||
String fieldname = tv.getFieldName(); // <1>
|
||||
int docCount = tv.getFieldStatistics().getDocCount(); // <2>
|
||||
long sumTotalTermFreq = tv.getFieldStatistics().getSumTotalTermFreq(); // <3>
|
||||
long sumTotalTermFreq =
|
||||
tv.getFieldStatistics().getSumTotalTermFreq(); // <3>
|
||||
long sumDocFreq = tv.getFieldStatistics().getSumDocFreq(); // <4>
|
||||
if (tv.getTerms() != null) {
|
||||
List<TermVectorsResponse.TermVector.Term> terms = tv.getTerms(); // <5>
|
||||
List<TermVectorsResponse.TermVector.Term> terms =
|
||||
tv.getTerms(); // <5>
|
||||
for (TermVectorsResponse.TermVector.Term term : terms) {
|
||||
String termStr = term.getTerm(); // <6>
|
||||
int termFreq = term.getTermFreq(); // <7>
|
||||
|
@ -1591,7 +1622,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
long totalTermFreq = term.getTotalTermFreq(); // <9>
|
||||
float score = term.getScore(); // <10>
|
||||
if (term.getTokens() != null) {
|
||||
List<TermVectorsResponse.TermVector.Token> tokens = term.getTokens(); // <11>
|
||||
List<TermVectorsResponse.TermVector.Token> tokens =
|
||||
term.getTokens(); // <11>
|
||||
for (TermVectorsResponse.TermVector.Token token : tokens) {
|
||||
int position = token.getPosition(); // <12>
|
||||
int startOffset = token.getStartOffset(); // <13>
|
||||
|
@ -1602,11 +1634,12 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
}
|
||||
// end::term-vectors-term-vectors
|
||||
}
|
||||
// end::term-vectors-term-vectors
|
||||
|
||||
ActionListener<TermVectorsResponse> listener;
|
||||
// tag::term-vectors-execute-listener
|
||||
ActionListener<TermVectorsResponse> listener = new ActionListener<TermVectorsResponse>() {
|
||||
listener = new ActionListener<TermVectorsResponse>() {
|
||||
@Override
|
||||
public void onResponse(TermVectorsResponse termVectorsResponse) {
|
||||
// <1>
|
||||
|
@ -1664,7 +1697,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
"index", // <1>
|
||||
"type", // <2>
|
||||
"example_id")); // <3>
|
||||
request.add(new MultiGetRequest.Item("index", "type", "another_id")); // <4>
|
||||
request.add(new MultiGetRequest.Item("index", "type", "another_id")); // <4>
|
||||
// end::multi-get-request
|
||||
|
||||
// Add a missing index so we can test it.
|
||||
|
@ -1715,11 +1748,12 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
// TODO status is broken! fix in a followup
|
||||
// assertEquals(RestStatus.NOT_FOUND, ee.status()); // <4>
|
||||
assertThat(e.getMessage(),
|
||||
containsString("reason=no such index [missing_index]")); // <5>
|
||||
containsString("reason=no such index [missing_index]")); // <5>
|
||||
// end::multi-get-indexnotfound
|
||||
|
||||
ActionListener<MultiGetResponse> listener;
|
||||
// tag::multi-get-execute-listener
|
||||
ActionListener<MultiGetResponse> listener = new ActionListener<MultiGetResponse>() {
|
||||
listener = new ActionListener<MultiGetResponse>() {
|
||||
@Override
|
||||
public void onResponse(MultiGetResponse response) {
|
||||
// <1>
|
||||
|
|
|
@ -26,12 +26,12 @@ import org.elasticsearch.client.RequestOptions;
|
|||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.protocol.xpack.graph.Connection;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
|
||||
import org.elasticsearch.protocol.xpack.graph.Hop;
|
||||
import org.elasticsearch.protocol.xpack.graph.Vertex;
|
||||
import org.elasticsearch.protocol.xpack.graph.VertexRequest;
|
||||
import org.elasticsearch.client.graph.Connection;
|
||||
import org.elasticsearch.client.graph.GraphExploreRequest;
|
||||
import org.elasticsearch.client.graph.GraphExploreResponse;
|
||||
import org.elasticsearch.client.graph.Hop;
|
||||
import org.elasticsearch.client.graph.Vertex;
|
||||
import org.elasticsearch.client.graph.VertexRequest;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -24,9 +24,9 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
|||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
|
||||
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.client.migration.IndexUpgradeInfoResponse;
|
||||
import org.elasticsearch.client.migration.UpgradeActionRequired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
|
|
@ -80,11 +80,13 @@ import org.elasticsearch.client.ml.StartDatafeedRequest;
|
|||
import org.elasticsearch.client.ml.StartDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.StopDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.StopDatafeedResponse;
|
||||
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.calendars.Calendar;
|
||||
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
|
||||
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
|
||||
import org.elasticsearch.client.ml.job.config.DataDescription;
|
||||
|
@ -630,6 +632,77 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testUpdateDatafeed() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
Job job = MachineLearningIT.buildJob("update-datafeed-job");
|
||||
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
String datafeedId = job.getId() + "-feed";
|
||||
DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
|
||||
client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
AggregatorFactories.Builder aggs = AggregatorFactories.builder();
|
||||
List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
|
||||
// tag::update-datafeed-config
|
||||
DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder(datafeedId) // <1>
|
||||
.setAggregations(aggs) // <2>
|
||||
.setIndices("index_1", "index_2") // <3>
|
||||
.setChunkingConfig(ChunkingConfig.newAuto()) // <4>
|
||||
.setFrequency(TimeValue.timeValueSeconds(30)) // <5>
|
||||
.setQuery(QueryBuilders.matchAllQuery()) // <6>
|
||||
.setQueryDelay(TimeValue.timeValueMinutes(1)) // <7>
|
||||
.setScriptFields(scriptFields) // <8>
|
||||
.setScrollSize(1000) // <9>
|
||||
.setJobId("update-datafeed-job"); // <10>
|
||||
// end::update-datafeed-config
|
||||
|
||||
// Clearing aggregation to avoid complex validation rules
|
||||
datafeedUpdateBuilder.setAggregations((String) null);
|
||||
|
||||
// tag::update-datafeed-request
|
||||
UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdateBuilder.build()); // <1>
|
||||
// end::update-datafeed-request
|
||||
|
||||
// tag::update-datafeed-execute
|
||||
PutDatafeedResponse response = client.machineLearning().updateDatafeed(request, RequestOptions.DEFAULT);
|
||||
// end::update-datafeed-execute
|
||||
|
||||
// tag::update-datafeed-response
|
||||
DatafeedConfig updatedDatafeed = response.getResponse(); // <1>
|
||||
// end::update-datafeed-response
|
||||
assertThat(updatedDatafeed.getId(), equalTo(datafeedId));
|
||||
}
|
||||
{
|
||||
DatafeedUpdate datafeedUpdate = new DatafeedUpdate.Builder(datafeedId).setIndices("index_1", "index_2").build();
|
||||
|
||||
UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdate);
|
||||
// tag::update-datafeed-execute-listener
|
||||
ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
|
||||
@Override
|
||||
public void onResponse(PutDatafeedResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::update-datafeed-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::update-datafeed-execute-async
|
||||
client.machineLearning().updateDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::update-datafeed-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetDatafeed() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.elasticsearch.client.Request;
|
|||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.RollupClient;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
|
||||
|
@ -46,6 +47,8 @@ import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
|||
import org.elasticsearch.client.rollup.PutRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.RollableIndexCaps;
|
||||
import org.elasticsearch.client.rollup.RollupJobCaps;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobRequest;
|
||||
import org.elasticsearch.client.rollup.StartRollupJobResponse;
|
||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.GroupConfig;
|
||||
import org.elasticsearch.client.rollup.job.config.HistogramGroupConfig;
|
||||
|
@ -186,6 +189,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testGetRollupJob() throws Exception {
|
||||
testCreateRollupJob();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
@ -236,6 +240,62 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testStartRollupJob() throws Exception {
|
||||
testCreateRollupJob();
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
String id = "job_1";
|
||||
// tag::rollup-start-job-request
|
||||
StartRollupJobRequest request = new StartRollupJobRequest(id); // <1>
|
||||
// end::rollup-start-job-request
|
||||
|
||||
|
||||
try {
|
||||
// tag::rollup-start-job-execute
|
||||
RollupClient rc = client.rollup();
|
||||
StartRollupJobResponse response = rc.startRollupJob(request, RequestOptions.DEFAULT);
|
||||
// end::rollup-start-job-execute
|
||||
|
||||
// tag::rollup-start-job-response
|
||||
response.isAcknowledged(); // <1>
|
||||
// end::rollup-start-job-response
|
||||
} catch (Exception e) {
|
||||
// Swallow any exception, this test does not test actually cancelling.
|
||||
}
|
||||
|
||||
// tag::rollup-start-job-execute-listener
|
||||
ActionListener<StartRollupJobResponse> listener = new ActionListener<StartRollupJobResponse>() {
|
||||
@Override
|
||||
public void onResponse(StartRollupJobResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::rollup-start-job-execute-listener
|
||||
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::rollup-start-job-execute-async
|
||||
RollupClient rc = client.rollup();
|
||||
rc.startRollupJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::rollup-start-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
|
||||
// stop job so it can correctly be deleted by the test teardown
|
||||
// TODO Replace this with the Rollup Stop Job API
|
||||
Response stoptResponse = client().performRequest(new Request("POST", "/_xpack/rollup/job/" + id + "/_stop"));
|
||||
assertEquals(RestStatus.OK.getStatus(), stoptResponse.getStatusLine().getStatusCode());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testGetRollupCaps() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
@ -329,6 +389,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
ActionListener<GetRollupCapsResponse> listener = new ActionListener<GetRollupCapsResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetRollupCapsResponse response) {
|
||||
|
||||
// <1>
|
||||
}
|
||||
|
||||
|
@ -406,6 +467,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
});
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public void testDeleteRollupJob() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
@ -450,4 +512,4 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -24,6 +24,7 @@ import org.apache.http.entity.ContentType;
|
|||
import org.apache.http.nio.entity.NStringEntity;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
|
@ -31,6 +32,8 @@ import org.elasticsearch.client.RestHighLevelClient;
|
|||
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||
import org.elasticsearch.client.security.ClearRolesCacheRequest;
|
||||
import org.elasticsearch.client.security.ClearRolesCacheResponse;
|
||||
import org.elasticsearch.client.security.CreateTokenRequest;
|
||||
import org.elasticsearch.client.security.CreateTokenResponse;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.DeleteRoleMappingResponse;
|
||||
import org.elasticsearch.client.security.DeleteRoleRequest;
|
||||
|
@ -38,6 +41,9 @@ import org.elasticsearch.client.security.DeleteRoleResponse;
|
|||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EmptyResponse;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.ExpressionRoleMapping;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsRequest;
|
||||
import org.elasticsearch.client.security.GetRoleMappingsResponse;
|
||||
import org.elasticsearch.client.security.GetSslCertificatesResponse;
|
||||
import org.elasticsearch.client.security.PutRoleMappingRequest;
|
||||
import org.elasticsearch.client.security.PutRoleMappingResponse;
|
||||
|
@ -54,14 +60,20 @@ import org.hamcrest.Matchers;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.isIn;
|
||||
|
||||
public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
|
@ -165,6 +177,119 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testGetRoleMappings() throws Exception {
|
||||
final RestHighLevelClient client = highLevelClient();
|
||||
|
||||
final RoleMapperExpression rules1 = AnyRoleMapperExpression.builder().addExpression(FieldRoleMapperExpression.ofUsername("*"))
|
||||
.addExpression(FieldRoleMapperExpression.ofGroups("cn=admins,dc=example,dc=com")).build();
|
||||
final PutRoleMappingRequest putRoleMappingRequest1 = new PutRoleMappingRequest("mapping-example-1", true, Collections.singletonList(
|
||||
"superuser"), rules1, null, RefreshPolicy.NONE);
|
||||
final PutRoleMappingResponse putRoleMappingResponse1 = client.security().putRoleMapping(putRoleMappingRequest1,
|
||||
RequestOptions.DEFAULT);
|
||||
boolean isCreated1 = putRoleMappingResponse1.isCreated();
|
||||
assertTrue(isCreated1);
|
||||
final RoleMapperExpression rules2 = AnyRoleMapperExpression.builder().addExpression(FieldRoleMapperExpression.ofGroups(
|
||||
"cn=admins,dc=example,dc=com")).build();
|
||||
final Map<String, Object> metadata2 = new HashMap<>();
|
||||
metadata2.put("k1", "v1");
|
||||
final PutRoleMappingRequest putRoleMappingRequest2 = new PutRoleMappingRequest("mapping-example-2", true, Collections.singletonList(
|
||||
"monitoring"), rules2, metadata2, RefreshPolicy.NONE);
|
||||
final PutRoleMappingResponse putRoleMappingResponse2 = client.security().putRoleMapping(putRoleMappingRequest2,
|
||||
RequestOptions.DEFAULT);
|
||||
boolean isCreated2 = putRoleMappingResponse2.isCreated();
|
||||
assertTrue(isCreated2);
|
||||
|
||||
{
|
||||
// tag::get-role-mappings-execute
|
||||
final GetRoleMappingsRequest request = new GetRoleMappingsRequest("mapping-example-1");
|
||||
final GetRoleMappingsResponse response = client.security().getRoleMappings(request, RequestOptions.DEFAULT);
|
||||
// end::get-role-mappings-execute
|
||||
// tag::get-role-mappings-response
|
||||
List<ExpressionRoleMapping> mappings = response.getMappings();
|
||||
// end::get-role-mappings-response
|
||||
assertNotNull(mappings);
|
||||
assertThat(mappings.size(), is(1));
|
||||
assertThat(mappings.get(0).isEnabled(), is(true));
|
||||
assertThat(mappings.get(0).getName(), is("mapping-example-1"));
|
||||
assertThat(mappings.get(0).getExpression(), equalTo(rules1));
|
||||
assertThat(mappings.get(0).getMetadata(), equalTo(Collections.emptyMap()));
|
||||
assertThat(mappings.get(0).getRoles(), contains("superuser"));
|
||||
}
|
||||
|
||||
{
|
||||
// tag::get-role-mappings-list-execute
|
||||
final GetRoleMappingsRequest request = new GetRoleMappingsRequest("mapping-example-1", "mapping-example-2");
|
||||
final GetRoleMappingsResponse response = client.security().getRoleMappings(request, RequestOptions.DEFAULT);
|
||||
// end::get-role-mappings-list-execute
|
||||
List<ExpressionRoleMapping> mappings = response.getMappings();
|
||||
assertNotNull(mappings);
|
||||
assertThat(mappings.size(), is(2));
|
||||
for (ExpressionRoleMapping roleMapping : mappings) {
|
||||
assertThat(roleMapping.isEnabled(), is(true));
|
||||
assertThat(roleMapping.getName(), isIn(new String[] { "mapping-example-1", "mapping-example-2" }));
|
||||
if (roleMapping.getName().equals("mapping-example-1")) {
|
||||
assertThat(roleMapping.getMetadata(), equalTo(Collections.emptyMap()));
|
||||
assertThat(roleMapping.getExpression(), equalTo(rules1));
|
||||
assertThat(roleMapping.getRoles(), contains("superuser"));
|
||||
} else {
|
||||
assertThat(roleMapping.getMetadata(), equalTo(metadata2));
|
||||
assertThat(roleMapping.getExpression(), equalTo(rules2));
|
||||
assertThat(roleMapping.getRoles(), contains("monitoring"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// tag::get-role-mappings-all-execute
|
||||
final GetRoleMappingsRequest request = new GetRoleMappingsRequest();
|
||||
final GetRoleMappingsResponse response = client.security().getRoleMappings(request, RequestOptions.DEFAULT);
|
||||
// end::get-role-mappings-all-execute
|
||||
List<ExpressionRoleMapping> mappings = response.getMappings();
|
||||
assertNotNull(mappings);
|
||||
assertThat(mappings.size(), is(2));
|
||||
for (ExpressionRoleMapping roleMapping : mappings) {
|
||||
assertThat(roleMapping.isEnabled(), is(true));
|
||||
assertThat(roleMapping.getName(), isIn(new String[] { "mapping-example-1", "mapping-example-2" }));
|
||||
if (roleMapping.getName().equals("mapping-example-1")) {
|
||||
assertThat(roleMapping.getMetadata(), equalTo(Collections.emptyMap()));
|
||||
assertThat(roleMapping.getExpression(), equalTo(rules1));
|
||||
assertThat(roleMapping.getRoles(), contains("superuser"));
|
||||
} else {
|
||||
assertThat(roleMapping.getMetadata(), equalTo(metadata2));
|
||||
assertThat(roleMapping.getExpression(), equalTo(rules2));
|
||||
assertThat(roleMapping.getRoles(), contains("monitoring"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
final GetRoleMappingsRequest request = new GetRoleMappingsRequest();
|
||||
// tag::get-role-mappings-execute-listener
|
||||
ActionListener<GetRoleMappingsResponse> listener = new ActionListener<GetRoleMappingsResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetRoleMappingsResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::get-role-mappings-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::get-role-mappings-execute-async
|
||||
client.security().getRoleMappingsAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::get-role-mappings-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testEnableUser() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||
|
@ -546,4 +671,79 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
client().performRequest(addRoleRequest);
|
||||
}
|
||||
|
||||
public void testCreateToken() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
// Setup user
|
||||
PutUserRequest putUserRequest = new PutUserRequest("token_user", "password".toCharArray(),
|
||||
Collections.singletonList("kibana_user"), null, null, true, null, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putUserResponse.isCreated());
|
||||
}
|
||||
{
|
||||
// tag::create-token-password-request
|
||||
final char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||
CreateTokenRequest createTokenRequest = CreateTokenRequest.passwordGrant("token_user", password);
|
||||
// end::create-token-password-request
|
||||
|
||||
// tag::create-token-execute
|
||||
CreateTokenResponse createTokenResponse = client.security().createToken(createTokenRequest, RequestOptions.DEFAULT);
|
||||
// end::create-token-execute
|
||||
|
||||
// tag::create-token-response
|
||||
String accessToken = createTokenResponse.getAccessToken(); // <1>
|
||||
String refreshToken = createTokenResponse.getRefreshToken(); // <2>
|
||||
// end::create-token-response
|
||||
assertNotNull(accessToken);
|
||||
assertNotNull(refreshToken);
|
||||
assertNotNull(createTokenResponse.getExpiresIn());
|
||||
|
||||
// tag::create-token-refresh-request
|
||||
createTokenRequest = CreateTokenRequest.refreshTokenGrant(refreshToken);
|
||||
// end::create-token-refresh-request
|
||||
|
||||
CreateTokenResponse refreshResponse = client.security().createToken(createTokenRequest, RequestOptions.DEFAULT);
|
||||
assertNotNull(refreshResponse.getAccessToken());
|
||||
assertNotNull(refreshResponse.getRefreshToken());
|
||||
}
|
||||
|
||||
{
|
||||
// tag::create-token-client-credentials-request
|
||||
CreateTokenRequest createTokenRequest = CreateTokenRequest.clientCredentialsGrant();
|
||||
// end::create-token-client-credentials-request
|
||||
|
||||
ActionListener<CreateTokenResponse> listener;
|
||||
//tag::create-token-execute-listener
|
||||
listener = new ActionListener<CreateTokenResponse>() {
|
||||
@Override
|
||||
public void onResponse(CreateTokenResponse createTokenResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::create-token-execute-listener
|
||||
|
||||
// Avoid unused variable warning
|
||||
assertNotNull(listener);
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final PlainActionFuture<CreateTokenResponse> future = new PlainActionFuture<>();
|
||||
listener = future;
|
||||
|
||||
//tag::create-token-execute-async
|
||||
client.security().createTokenAsync(createTokenRequest, RequestOptions.DEFAULT, listener); // <1>
|
||||
//end::create-token-execute-async
|
||||
|
||||
assertNotNull(future.get(30, TimeUnit.SECONDS));
|
||||
assertNotNull(future.get().getAccessToken());
|
||||
// "client-credentials" grants aren't refreshable
|
||||
assertNull(future.get().getRefreshToken());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,10 +40,10 @@ import org.elasticsearch.client.watcher.WatchStatus;
|
|||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.client.watcher.DeleteWatchResponse;
|
||||
import org.elasticsearch.client.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.client.watcher.PutWatchResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.graph;
|
||||
package org.elasticsearch.client.graph;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
|
@ -35,7 +35,7 @@ import java.util.function.Supplier;
|
|||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphExploreResponse> {
|
||||
public class GraphExploreResponseTests extends AbstractXContentTestCase<GraphExploreResponse> {
|
||||
|
||||
@Override
|
||||
protected GraphExploreResponse createTestInstance() {
|
|
@ -17,13 +17,16 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
apply plugin: 'elasticsearch.build'
|
||||
package org.elasticsearch.client.migration;
|
||||
|
||||
description = 'Request and Response objects for x-pack that are used by the' +
|
||||
' high level rest client and x-pack itself'
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
dependencies {
|
||||
compileOnly "org.elasticsearch:elasticsearch:${version}"
|
||||
public class IndexUpgradeInfoRequestTests extends ESTestCase {
|
||||
|
||||
testCompile "org.elasticsearch.test:framework:${version}"
|
||||
// TODO: add to cross XPack-HLRC serialization test
|
||||
|
||||
public void testNullIndices() {
|
||||
expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[])null));
|
||||
expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[])null));
|
||||
}
|
||||
}
|
|
@ -17,7 +17,10 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Request and Response objects for miscellaneous X-Pack APIs.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack;
|
||||
package org.elasticsearch.client.migration;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
public class IndexUpgradeInfoResponseTests extends ESTestCase {
|
||||
// TODO: add to cross XPack-HLRC serialization test
|
||||
}
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
|
||||
import org.elasticsearch.client.ml.datafeed.DatafeedUpdateTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
|
||||
public class UpdateDatafeedRequestTests extends AbstractXContentTestCase<UpdateDatafeedRequest> {
|
||||
|
||||
@Override
|
||||
protected UpdateDatafeedRequest createTestInstance() {
|
||||
return new UpdateDatafeedRequest(DatafeedUpdateTests.createRandom());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UpdateDatafeedRequest doParseInstance(XContentParser parser) {
|
||||
return new UpdateDatafeedRequest(DatafeedUpdate.PARSER.apply(parser, null).build());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
}
|
|
@ -32,8 +32,7 @@ import java.util.List;
|
|||
|
||||
public class DatafeedUpdateTests extends AbstractXContentTestCase<DatafeedUpdate> {
|
||||
|
||||
@Override
|
||||
protected DatafeedUpdate createTestInstance() {
|
||||
public static DatafeedUpdate createRandom() {
|
||||
DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(DatafeedConfigTests.randomValidDatafeedId());
|
||||
if (randomBoolean()) {
|
||||
builder.setJobId(randomAlphaOfLength(10));
|
||||
|
@ -87,6 +86,11 @@ public class DatafeedUpdateTests extends AbstractXContentTestCase<DatafeedUpdate
|
|||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DatafeedUpdate createTestInstance() {
|
||||
return createRandom();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DatafeedUpdate doParseInstance(XContentParser parser) {
|
||||
return DatafeedUpdate.PARSER.apply(parser, null).build();
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
public class StartRollupJobRequestTests extends ESTestCase {
|
||||
|
||||
public void testConstructor() {
|
||||
String jobId = randomAlphaOfLength(5);
|
||||
assertEquals(jobId, new StartRollupJobRequest(jobId).getJobId());
|
||||
}
|
||||
|
||||
public void testEqualsAndHash() {
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(new StartRollupJobRequest(randomAlphaOfLength(5)),
|
||||
orig -> new StartRollupJobRequest(orig.getJobId()),
|
||||
orig -> new StartRollupJobRequest(orig.getJobId() + "_suffix"));
|
||||
}
|
||||
|
||||
public void testRequireJobId() {
|
||||
final NullPointerException e = expectThrows(NullPointerException.class, ()-> new StartRollupJobRequest(null));
|
||||
assertEquals("id parameter must not be null", e.getMessage());
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.rollup;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class StartRollupJobResponseTests extends AbstractXContentTestCase<StartRollupJobResponse> {
|
||||
|
||||
private boolean acknowledged;
|
||||
|
||||
@Before
|
||||
public void setupAcknoledged() {
|
||||
acknowledged = randomBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StartRollupJobResponse createTestInstance() {
|
||||
return new StartRollupJobResponse(acknowledged);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected StartRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return StartRollupJobResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,98 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class CreateTokenRequestTests extends ESTestCase {
|
||||
|
||||
public void testCreateTokenFromPassword() {
|
||||
final CreateTokenRequest request = CreateTokenRequest.passwordGrant("jsmith", "top secret password".toCharArray());
|
||||
assertThat(request.getGrantType(), equalTo("password"));
|
||||
assertThat(request.getUsername(), equalTo("jsmith"));
|
||||
assertThat(new String(request.getPassword()), equalTo("top secret password"));
|
||||
assertThat(request.getScope(), nullValue());
|
||||
assertThat(request.getRefreshToken(), nullValue());
|
||||
assertThat(Strings.toString(request), equalTo("{" +
|
||||
"\"grant_type\":\"password\"," +
|
||||
"\"username\":\"jsmith\"," +
|
||||
"\"password\":\"top secret password\"" +
|
||||
"}"
|
||||
));
|
||||
}
|
||||
|
||||
public void testCreateTokenFromRefreshToken() {
|
||||
final CreateTokenRequest request = CreateTokenRequest.refreshTokenGrant("9a7f41cf-9918-4d1f-bfaa-ad3f8f9f02b9");
|
||||
assertThat(request.getGrantType(), equalTo("refresh_token"));
|
||||
assertThat(request.getRefreshToken(), equalTo("9a7f41cf-9918-4d1f-bfaa-ad3f8f9f02b9"));
|
||||
assertThat(request.getScope(), nullValue());
|
||||
assertThat(request.getUsername(), nullValue());
|
||||
assertThat(request.getPassword(), nullValue());
|
||||
assertThat(Strings.toString(request), equalTo("{" +
|
||||
"\"grant_type\":\"refresh_token\"," +
|
||||
"\"refresh_token\":\"9a7f41cf-9918-4d1f-bfaa-ad3f8f9f02b9\"" +
|
||||
"}"
|
||||
));
|
||||
}
|
||||
|
||||
public void testCreateTokenFromClientCredentials() {
|
||||
final CreateTokenRequest request = CreateTokenRequest.clientCredentialsGrant();
|
||||
assertThat(request.getGrantType(), equalTo("client_credentials"));
|
||||
assertThat(request.getScope(), nullValue());
|
||||
assertThat(request.getUsername(), nullValue());
|
||||
assertThat(request.getPassword(), nullValue());
|
||||
assertThat(request.getRefreshToken(), nullValue());
|
||||
assertThat(Strings.toString(request), equalTo("{\"grant_type\":\"client_credentials\"}"));
|
||||
}
|
||||
|
||||
public void testEqualsAndHashCode() {
|
||||
final String grantType = randomAlphaOfLength(8);
|
||||
final String scope = randomBoolean() ? null : randomAlphaOfLength(6);
|
||||
final String username = randomBoolean() ? null : randomAlphaOfLengthBetween(4, 10);
|
||||
final char[] password = randomBoolean() ? null : randomAlphaOfLengthBetween(8, 12).toCharArray();
|
||||
final String refreshToken = randomBoolean() ? null : randomAlphaOfLengthBetween(12, 24);
|
||||
final CreateTokenRequest request = new CreateTokenRequest(grantType, scope, username, password, refreshToken);
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(request,
|
||||
r -> new CreateTokenRequest(r.getGrantType(), r.getScope(), r.getUsername(), r.getPassword(), r.getRefreshToken()),
|
||||
this::mutate);
|
||||
}
|
||||
|
||||
private CreateTokenRequest mutate(CreateTokenRequest req) {
|
||||
switch (randomIntBetween(1, 5)) {
|
||||
case 1:
|
||||
return new CreateTokenRequest("g", req.getScope(), req.getUsername(), req.getPassword(), req.getRefreshToken());
|
||||
case 2:
|
||||
return new CreateTokenRequest(req.getGrantType(), "s", req.getUsername(), req.getPassword(), req.getRefreshToken());
|
||||
case 3:
|
||||
return new CreateTokenRequest(req.getGrantType(), req.getScope(), "u", req.getPassword(), req.getRefreshToken());
|
||||
case 4:
|
||||
final char[] password = {'p'};
|
||||
return new CreateTokenRequest(req.getGrantType(), req.getScope(), req.getUsername(), password, req.getRefreshToken());
|
||||
case 5:
|
||||
return new CreateTokenRequest(req.getGrantType(), req.getScope(), req.getUsername(), req.getPassword(), "r");
|
||||
}
|
||||
throw new IllegalStateException("Bad random number");
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class CreateTokenResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
final String accessToken = randomAlphaOfLengthBetween(12, 24);
|
||||
final TimeValue expiresIn = TimeValue.timeValueSeconds(randomIntBetween(30, 10_000));
|
||||
final String refreshToken = randomBoolean() ? null : randomAlphaOfLengthBetween(12, 24);
|
||||
final String scope = randomBoolean() ? null : randomAlphaOfLength(4);
|
||||
final String type = randomAlphaOfLength(6);
|
||||
|
||||
final XContentType xContentType = randomFrom(XContentType.values());
|
||||
final XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
|
||||
builder.startObject()
|
||||
.field("access_token", accessToken)
|
||||
.field("type", type)
|
||||
.field("expires_in", expiresIn.seconds());
|
||||
if (refreshToken != null || randomBoolean()) {
|
||||
builder.field("refresh_token", refreshToken);
|
||||
}
|
||||
if (scope != null || randomBoolean()) {
|
||||
builder.field("scope", scope);
|
||||
}
|
||||
builder.endObject();
|
||||
BytesReference xContent = BytesReference.bytes(builder);
|
||||
|
||||
final CreateTokenResponse response = CreateTokenResponse.fromXContent(createParser(xContentType.xContent(), xContent));
|
||||
assertThat(response.getAccessToken(), equalTo(accessToken));
|
||||
assertThat(response.getRefreshToken(), equalTo(refreshToken));
|
||||
assertThat(response.getScope(), equalTo(scope));
|
||||
assertThat(response.getType(), equalTo(type));
|
||||
assertThat(response.getExpiresIn(), equalTo(expiresIn));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
|
||||
import org.elasticsearch.common.xcontent.DeprecationHandler;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class ExpressionRoleMappingTests extends ESTestCase {
|
||||
|
||||
public void testExpressionRoleMappingParser() throws IOException {
|
||||
final String json =
|
||||
"{\n" +
|
||||
" \"enabled\" : true,\n" +
|
||||
" \"roles\" : [\n" +
|
||||
" \"superuser\"\n" +
|
||||
" ],\n" +
|
||||
" \"rules\" : {\n" +
|
||||
" \"field\" : {\n" +
|
||||
" \"realm.name\" : \"kerb1\"\n" +
|
||||
" }\n" +
|
||||
" },\n" +
|
||||
" \"metadata\" : { }\n" +
|
||||
" }";
|
||||
final ExpressionRoleMapping expressionRoleMapping = ExpressionRoleMapping.PARSER.parse(XContentType.JSON.xContent().createParser(
|
||||
new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
|
||||
@Override
|
||||
public void usedDeprecatedName(String usedName, String modernName) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void usedDeprecatedField(String usedName, String replacedWith) {
|
||||
}
|
||||
}, json), "example-role-mapping");
|
||||
final ExpressionRoleMapping expectedRoleMapping = new ExpressionRoleMapping("example-role-mapping", FieldRoleMapperExpression
|
||||
.ofKeyValues("realm.name", "kerb1"), Collections.singletonList("superuser"), null, true);
|
||||
assertThat(expressionRoleMapping, equalTo(expectedRoleMapping));
|
||||
}
|
||||
|
||||
public void testEqualsHashCode() {
|
||||
final ExpressionRoleMapping expressionRoleMapping = new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression
|
||||
.ofKeyValues("realm.name", "kerb1"), Collections.singletonList("superuser"), null, true);
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(expressionRoleMapping, (original) -> {
|
||||
return new ExpressionRoleMapping(original.getName(), original.getExpression(), original.getRoles(), original.getMetadata(),
|
||||
original.isEnabled());
|
||||
});
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(expressionRoleMapping, (original) -> {
|
||||
return new ExpressionRoleMapping(original.getName(), original.getExpression(), original.getRoles(), original.getMetadata(),
|
||||
original.isEnabled());
|
||||
}, ExpressionRoleMappingTests::mutateTestItem);
|
||||
}
|
||||
|
||||
private static ExpressionRoleMapping mutateTestItem(ExpressionRoleMapping original) {
|
||||
ExpressionRoleMapping mutated = null;
|
||||
switch (randomIntBetween(0, 4)) {
|
||||
case 0:
|
||||
mutated = new ExpressionRoleMapping("namechanged", FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"), Collections
|
||||
.singletonList("superuser"), null, true);
|
||||
break;
|
||||
case 1:
|
||||
mutated = new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("changed", "changed"), Collections
|
||||
.singletonList("superuser"), null, true);
|
||||
break;
|
||||
case 2:
|
||||
mutated = new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"), Collections
|
||||
.singletonList("changed"), null, true);
|
||||
break;
|
||||
case 3:
|
||||
Map<String, Object> metadata = new HashMap<>();
|
||||
metadata.put("a", "b");
|
||||
mutated = new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"), Collections
|
||||
.singletonList("superuser"), metadata, true);
|
||||
break;
|
||||
case 4:
|
||||
mutated = new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"), Collections
|
||||
.singletonList("superuser"), null, false);
|
||||
break;
|
||||
}
|
||||
return mutated;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class GetRoleMappingsRequestTests extends ESTestCase {
|
||||
|
||||
public void testGetRoleMappingsRequest() {
|
||||
int noOfRoleMappingNames = randomIntBetween(0, 2);
|
||||
final String[] roleMappingNames = randomArray(noOfRoleMappingNames, noOfRoleMappingNames, String[]::new, () -> randomAlphaOfLength(
|
||||
5));
|
||||
final GetRoleMappingsRequest getRoleMappingsRequest = new GetRoleMappingsRequest(roleMappingNames);
|
||||
assertThat(getRoleMappingsRequest.getRoleMappingNames().size(), is(noOfRoleMappingNames));
|
||||
assertThat(getRoleMappingsRequest.getRoleMappingNames(), containsInAnyOrder(roleMappingNames));
|
||||
}
|
||||
|
||||
public void testEqualsHashCode() {
|
||||
int noOfRoleMappingNames = randomIntBetween(0, 2);
|
||||
final String[] roleMappingNames = randomArray(noOfRoleMappingNames, String[]::new, () -> randomAlphaOfLength(5));
|
||||
final GetRoleMappingsRequest getRoleMappingsRequest = new GetRoleMappingsRequest(roleMappingNames);
|
||||
assertNotNull(getRoleMappingsRequest);
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(getRoleMappingsRequest, (original) -> {
|
||||
return new GetRoleMappingsRequest(original.getRoleMappingNames().toArray(new String[0]));
|
||||
});
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(getRoleMappingsRequest, (original) -> {
|
||||
return new GetRoleMappingsRequest(original.getRoleMappingNames().toArray(new String[0]));
|
||||
}, GetRoleMappingsRequestTests::mutateTestItem);
|
||||
}
|
||||
|
||||
private static GetRoleMappingsRequest mutateTestItem(GetRoleMappingsRequest original) {
|
||||
return new GetRoleMappingsRequest(randomAlphaOfLength(8));
|
||||
}
|
||||
}
|
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
|
||||
import org.elasticsearch.common.xcontent.DeprecationHandler;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.EqualsHashCodeTestUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class GetRoleMappingsResponseTests extends ESTestCase {
|
||||
|
||||
public void testFromXContent() throws IOException {
|
||||
final String json = "{\n" +
|
||||
" \"kerberosmapping\" : {\n" +
|
||||
" \"enabled\" : true,\n" +
|
||||
" \"roles\" : [\n" +
|
||||
" \"superuser\"\n" +
|
||||
" ],\n" +
|
||||
" \"rules\" : {\n" +
|
||||
" \"field\" : {\n" +
|
||||
" \"realm.name\" : \"kerb1\"\n" +
|
||||
" }\n" +
|
||||
" },\n" +
|
||||
" \"metadata\" : { }\n" +
|
||||
" },\n" +
|
||||
" \"ldapmapping\" : {\n" +
|
||||
" \"enabled\" : false,\n" +
|
||||
" \"roles\" : [\n" +
|
||||
" \"monitoring\"\n" +
|
||||
" ],\n" +
|
||||
" \"rules\" : {\n" +
|
||||
" \"field\" : {\n" +
|
||||
" \"groups\" : \"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local\"\n" +
|
||||
" }\n" +
|
||||
" },\n" +
|
||||
" \"metadata\" : { }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
final GetRoleMappingsResponse response = GetRoleMappingsResponse.fromXContent(XContentType.JSON.xContent().createParser(
|
||||
new NamedXContentRegistry(Collections.emptyList()), new DeprecationHandler() {
|
||||
@Override
|
||||
public void usedDeprecatedName(String usedName, String modernName) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void usedDeprecatedField(String usedName, String replacedWith) {
|
||||
}
|
||||
}, json));
|
||||
final List<ExpressionRoleMapping> expectedRoleMappingsList = new ArrayList<>();
|
||||
expectedRoleMappingsList.add(new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name",
|
||||
"kerb1"), Collections.singletonList("superuser"), null, true));
|
||||
expectedRoleMappingsList.add(new ExpressionRoleMapping("ldapmapping", FieldRoleMapperExpression.ofGroups(
|
||||
"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"), Collections.singletonList("monitoring"), null, false));
|
||||
final GetRoleMappingsResponse expectedResponse = new GetRoleMappingsResponse(expectedRoleMappingsList);
|
||||
assertThat(response, equalTo(expectedResponse));
|
||||
}
|
||||
|
||||
public void testEqualsHashCode() {
|
||||
final List<ExpressionRoleMapping> roleMappingsList = new ArrayList<>();
|
||||
roleMappingsList.add(new ExpressionRoleMapping("kerberosmapping", FieldRoleMapperExpression.ofKeyValues("realm.name",
|
||||
"kerb1"), Collections.singletonList("superuser"), null, true));
|
||||
final GetRoleMappingsResponse response = new GetRoleMappingsResponse(roleMappingsList);
|
||||
assertNotNull(response);
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(response, (original) -> {
|
||||
return new GetRoleMappingsResponse(original.getMappings());
|
||||
});
|
||||
EqualsHashCodeTestUtils.checkEqualsAndHashCode(response, (original) -> {
|
||||
return new GetRoleMappingsResponse(original.getMappings());
|
||||
}, GetRoleMappingsResponseTests::mutateTestItem);
|
||||
}
|
||||
|
||||
private static GetRoleMappingsResponse mutateTestItem(GetRoleMappingsResponse original) {
|
||||
GetRoleMappingsResponse mutated = null;
|
||||
switch(randomIntBetween(0, 1)) {
|
||||
case 0:
|
||||
final List<ExpressionRoleMapping> roleMappingsList1 = new ArrayList<>();
|
||||
roleMappingsList1.add(new ExpressionRoleMapping("ldapmapping", FieldRoleMapperExpression.ofGroups(
|
||||
"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"), Collections.singletonList("monitoring"), null, false));
|
||||
mutated = new GetRoleMappingsResponse(roleMappingsList1);
|
||||
break;
|
||||
case 1:
|
||||
final List<ExpressionRoleMapping> roleMappingsList2 = new ArrayList<>();
|
||||
ExpressionRoleMapping orginialRoleMapping = original.getMappings().get(0);
|
||||
roleMappingsList2.add(new ExpressionRoleMapping(orginialRoleMapping.getName(), FieldRoleMapperExpression.ofGroups(
|
||||
"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
||||
orginialRoleMapping.getRoles(), orginialRoleMapping.getMetadata(), !orginialRoleMapping.isEnabled()));
|
||||
mutated = new GetRoleMappingsResponse(roleMappingsList2);
|
||||
break;
|
||||
}
|
||||
return mutated;
|
||||
}
|
||||
}
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
|
@ -16,7 +16,7 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.protocol.xpack.watcher;
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
|
@ -19,19 +19,15 @@
|
|||
|
||||
package org.elasticsearch.client.watcher;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.client.ValidationException;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class WatchRequestValidationTests extends ESTestCase {
|
||||
|
||||
|
@ -61,38 +57,38 @@ public class WatchRequestValidationTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testDeleteWatchInvalidWatchId() {
|
||||
ActionRequestValidationException e = new DeleteWatchRequest("id with whitespaces").validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
|
||||
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
|
||||
() -> new DeleteWatchRequest("id with whitespaces"));
|
||||
assertThat(exception.getMessage(), is("watch id contains whitespace"));
|
||||
}
|
||||
|
||||
public void testDeleteWatchNullId() {
|
||||
ActionRequestValidationException e = new DeleteWatchRequest(null).validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("watch id is missing"));
|
||||
final NullPointerException exception = expectThrows(NullPointerException.class,
|
||||
() -> new DeleteWatchRequest(null));
|
||||
assertThat(exception.getMessage(), is("watch id is missing"));
|
||||
}
|
||||
|
||||
public void testPutWatchInvalidWatchId() {
|
||||
ActionRequestValidationException e = new PutWatchRequest("id with whitespaces", BytesArray.EMPTY, XContentType.JSON).validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
|
||||
final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
|
||||
() -> new PutWatchRequest("id with whitespaces", BytesArray.EMPTY, XContentType.JSON));
|
||||
assertThat(exception.getMessage(), is("watch id contains whitespace"));
|
||||
}
|
||||
|
||||
public void testPutWatchNullId() {
|
||||
ActionRequestValidationException e = new PutWatchRequest(null, BytesArray.EMPTY, XContentType.JSON).validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("watch id is missing"));
|
||||
final NullPointerException exception = expectThrows(NullPointerException.class,
|
||||
() -> new PutWatchRequest(null, BytesArray.EMPTY, XContentType.JSON));
|
||||
assertThat(exception.getMessage(), is("watch id is missing"));
|
||||
}
|
||||
|
||||
public void testPutWatchSourceNull() {
|
||||
ActionRequestValidationException e = new PutWatchRequest("foo", null, XContentType.JSON).validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("watch source is missing"));
|
||||
final NullPointerException exception = expectThrows(NullPointerException.class,
|
||||
() -> new PutWatchRequest("foo", null, XContentType.JSON));
|
||||
assertThat(exception.getMessage(), is("watch source is missing"));
|
||||
}
|
||||
|
||||
public void testPutWatchContentNull() {
|
||||
ActionRequestValidationException e = new PutWatchRequest("foo", BytesArray.EMPTY, null).validate();
|
||||
assertThat(e, is(notNullValue()));
|
||||
assertThat(e.validationErrors(), hasItem("request body is missing"));
|
||||
final NullPointerException exception = expectThrows(NullPointerException.class,
|
||||
() -> new PutWatchRequest("foo", BytesArray.EMPTY, null));
|
||||
assertThat(exception.getMessage(), is("request body is missing"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,6 +54,11 @@ ifeval::["{release-state}"!="unreleased"]
|
|||
:rank-eval-client-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/plugin/rank-eval-client/{version}
|
||||
endif::[]
|
||||
|
||||
:javadoc-client: {rest-high-level-client-javadoc}/org/elasticsearch/client
|
||||
:javadoc-xpack: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack
|
||||
:javadoc-license: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/license
|
||||
:javadoc-watcher: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/watcher
|
||||
|
||||
///////
|
||||
Shared attribute values are pulled from elastic/docs
|
||||
///////
|
||||
|
|
|
@ -1,38 +1,45 @@
|
|||
[[java-rest-high-document-bulk]]
|
||||
--
|
||||
:api: bulk
|
||||
:request: BulkRequest
|
||||
:response: BulkResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Bulk API
|
||||
|
||||
NOTE: The Java High Level REST Client provides the <<java-rest-high-document-bulk-processor>> to assist with bulk requests
|
||||
NOTE: The Java High Level REST Client provides the
|
||||
<<{upid}-{api}-processor>> to assist with bulk requests.
|
||||
|
||||
[[java-rest-high-document-bulk-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Bulk Request
|
||||
|
||||
A `BulkRequest` can be used to execute multiple index, update and/or delete
|
||||
A +{request}+ can be used to execute multiple index, update and/or delete
|
||||
operations using a single request.
|
||||
|
||||
It requires at least one operation to be added to the Bulk request:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Creates the `BulkRequest`
|
||||
<2> Adds a first `IndexRequest` to the Bulk request. See <<java-rest-high-document-index>>
|
||||
for more information on how to build `IndexRequest`.
|
||||
<1> Creates the +{request}+
|
||||
<2> Adds a first `IndexRequest` to the Bulk request. See <<{upid}-index>> for
|
||||
more information on how to build `IndexRequest`.
|
||||
<3> Adds a second `IndexRequest`
|
||||
<4> Adds a third `IndexRequest`
|
||||
|
||||
WARNING: The Bulk API supports only documents encoded in JSON or SMILE. Providing documents
|
||||
in any other format will result in an error.
|
||||
WARNING: The Bulk API supports only documents encoded in JSON or SMILE.
|
||||
Providing documents in any other format will result in an error.
|
||||
|
||||
And different operation types can be added to the same `BulkRequest`:
|
||||
And different operation types can be added to the same +{request}+:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-with-mixed-operations]
|
||||
include-tagged::{doc-tests-file}[{api}-request-with-mixed-operations]
|
||||
--------------------------------------------------
|
||||
<1> Adds a `DeleteRequest` to the `BulkRequest`. See <<java-rest-high-document-delete>>
|
||||
<1> Adds a `DeleteRequest` to the `BulkRequest`. See <<{upid}-delete>>
|
||||
for more information on how to build `DeleteRequest`.
|
||||
<2> Adds an `UpdateRequest` to the `BulkRequest`. See <<java-rest-high-document-update>>
|
||||
<2> Adds an `UpdateRequest` to the `BulkRequest`. See <<{upid}-update>>
|
||||
for more information on how to build `UpdateRequest`.
|
||||
<3> Adds an `IndexRequest` using the SMILE format
|
||||
|
||||
|
@ -41,102 +48,66 @@ The following arguments can optionally be provided:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-timeout]
|
||||
include-tagged::{doc-tests-file}[{api}-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for the bulk request to be performed as a `TimeValue`
|
||||
<2> Timeout to wait for the bulk request to be performed as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-refresh]
|
||||
include-tagged::{doc-tests-file}[{api}-request-refresh]
|
||||
--------------------------------------------------
|
||||
<1> Refresh policy as a `WriteRequest.RefreshPolicy` instance
|
||||
<2> Refresh policy as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-active-shards]
|
||||
include-tagged::{doc-tests-file}[{api}-request-active-shards]
|
||||
--------------------------------------------------
|
||||
<1> Sets the number of shard copies that must be active before proceeding with
|
||||
the index/update/delete operations.
|
||||
<2> Number of shard copies provided as a `ActiveShardCount`: can be `ActiveShardCount.ALL`,
|
||||
`ActiveShardCount.ONE` or `ActiveShardCount.DEFAULT` (default)
|
||||
<2> Number of shard copies provided as a `ActiveShardCount`: can be
|
||||
`ActiveShardCount.ALL`, `ActiveShardCount.ONE` or
|
||||
`ActiveShardCount.DEFAULT` (default)
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[[java-rest-high-document-bulk-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-bulk-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a bulk request requires both the `BulkRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `BulkRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `BulkResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument and contains a list of individual results for each
|
||||
operation that was executed. Note that one or more operations might have
|
||||
failed while the others have been successfully executed.
|
||||
<2> Called when the whole `BulkRequest` fails. In this case the raised
|
||||
exception is provided as an argument and no operation has been executed.
|
||||
|
||||
[[java-rest-high-document-bulk-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Bulk Response
|
||||
|
||||
The returned `BulkResponse` contains information about the executed operations and
|
||||
The returned +{response}+ contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Iterate over the results of all operations
|
||||
<2> Retrieve the response of the operation (successful or not), can be `IndexResponse`,
|
||||
`UpdateResponse` or `DeleteResponse` which can all be seen as `DocWriteResponse` instances
|
||||
<2> Retrieve the response of the operation (successful or not), can be
|
||||
`IndexResponse`, `UpdateResponse` or `DeleteResponse` which can all be seen as
|
||||
`DocWriteResponse` instances
|
||||
<3> Handle the response of an index operation
|
||||
<4> Handle the response of a update operation
|
||||
<5> Handle the response of a delete operation
|
||||
|
||||
The Bulk response provides a method to quickly check if one or more operation has failed:
|
||||
The Bulk response provides a method to quickly check if one or more operation
|
||||
has failed:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-has-failures]
|
||||
include-tagged::{doc-tests-file}[{api}-has-failures]
|
||||
--------------------------------------------------
|
||||
<1> This method returns `true` if at least one operation failed
|
||||
|
||||
In such situation it is necessary to iterate over all operation results in order to check
|
||||
if the operation failed, and if so, retrieve the corresponding failure:
|
||||
In such situation it is necessary to iterate over all operation results in order
|
||||
to check if the operation failed, and if so, retrieve the corresponding failure:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-errors]
|
||||
include-tagged::{doc-tests-file}[{api}-errors]
|
||||
--------------------------------------------------
|
||||
<1> Indicate if a given operation failed
|
||||
<2> Retrieve the failure of the failed operation
|
||||
|
||||
[[java-rest-high-document-bulk-processor]]
|
||||
[id="{upid}-{api}-processor"]
|
||||
==== Bulk Processor
|
||||
|
||||
The `BulkProcessor` simplifies the usage of the Bulk API by providing
|
||||
|
@ -146,29 +117,30 @@ transparently executed as they are added to the processor.
|
|||
In order to execute the requests, the `BulkProcessor` requires the following
|
||||
components:
|
||||
|
||||
`RestHighLevelClient`:: This client is used to execute the `BulkRequest`
|
||||
`RestHighLevelClient`:: This client is used to execute the +{request}+
|
||||
and to retrieve the `BulkResponse`
|
||||
`BulkProcessor.Listener`:: This listener is called before and after
|
||||
every `BulkRequest` execution or when a `BulkRequest` failed
|
||||
every +{request}+ execution or when a +{request}+ failed
|
||||
|
||||
Then the `BulkProcessor.builder` method can be used to build a new `BulkProcessor`:
|
||||
Then the `BulkProcessor.builder` method can be used to build a new
|
||||
`BulkProcessor`:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-init]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-init]
|
||||
--------------------------------------------------
|
||||
<1> Create the `BulkProcessor.Listener`
|
||||
<2> This method is called before each execution of a `BulkRequest`
|
||||
<3> This method is called after each execution of a `BulkRequest`
|
||||
<4> This method is called when a `BulkRequest` failed
|
||||
<2> This method is called before each execution of a +{request}+
|
||||
<3> This method is called after each execution of a +{request}+
|
||||
<4> This method is called when a +{request}+ failed
|
||||
<5> Create the `BulkProcessor` by calling the `build()` method from
|
||||
the `BulkProcessor.Builder`. The `RestHighLevelClient.bulkAsync()`
|
||||
method will be used to execute the `BulkRequest` under the hood.
|
||||
method will be used to execute the +{request}+ under the hood.
|
||||
|
||||
The `BulkProcessor.Builder` provides methods to configure how the `BulkProcessor`
|
||||
should handle requests execution:
|
||||
The `BulkProcessor.Builder` provides methods to configure how the
|
||||
`BulkProcessor` should handle requests execution:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-options]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-options]
|
||||
--------------------------------------------------
|
||||
<1> Set when to flush a new bulk request based on the number of
|
||||
actions currently added (defaults to 1000, use -1 to disable it)
|
||||
|
@ -186,32 +158,32 @@ for more options.
|
|||
Once the `BulkProcessor` is created requests can be added to it:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-add]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-add]
|
||||
--------------------------------------------------
|
||||
|
||||
The requests will be executed by the `BulkProcessor`, which takes care of
|
||||
calling the `BulkProcessor.Listener` for every bulk request.
|
||||
|
||||
The listener provides methods to access to the `BulkRequest` and the `BulkResponse`:
|
||||
The listener provides methods to access to the +{request}+ and the +{response}+:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-listener]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called before each execution of a `BulkRequest`, this method allows
|
||||
to know the number of operations that are going to be executed within the `BulkRequest`
|
||||
<2> Called after each execution of a `BulkRequest`, this method allows
|
||||
to know if the `BulkResponse` contains errors
|
||||
<3> Called if the `BulkRequest` failed, this method allows to know
|
||||
<1> Called before each execution of a +{request}+, this method allows to know
|
||||
the number of operations that are going to be executed within the +{request}+
|
||||
<2> Called after each execution of a +{request}+, this method allows to know if
|
||||
the +{response}+ contains errors
|
||||
<3> Called if the +{request}+ failed, this method allows to know
|
||||
the failure
|
||||
|
||||
Once all requests have been added to the `BulkProcessor`, its instance needs to
|
||||
be closed using one of the two available closing methods.
|
||||
|
||||
The `awaitClose()` method can be used to wait until all requests have been processed
|
||||
or the specified waiting time elapses:
|
||||
The `awaitClose()` method can be used to wait until all requests have been
|
||||
processed or the specified waiting time elapses:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-await]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-await]
|
||||
--------------------------------------------------
|
||||
<1> The method returns `true` if all bulk requests completed and `false` if the
|
||||
waiting time elapsed before all the bulk requests completed
|
||||
|
@ -219,9 +191,8 @@ waiting time elapsed before all the bulk requests completed
|
|||
The `close()` method can be used to immediately close the `BulkProcessor`:
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-processor-close]
|
||||
include-tagged::{doc-tests-file}[{api}-processor-close]
|
||||
--------------------------------------------------
|
||||
|
||||
Both methods flush the requests added to the processor before closing the processor
|
||||
and also forbid any new request to be added to it.
|
||||
|
||||
Both methods flush the requests added to the processor before closing the
|
||||
processor and also forbid any new request to be added to it.
|
||||
|
|
|
@ -1,26 +1,33 @@
|
|||
[[java-rest-high-document-delete-by-query]]
|
||||
--
|
||||
:api: delete-by-query
|
||||
:request: DeleteByQueryRequest
|
||||
:response: DeleteByQueryResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Delete By Query API
|
||||
|
||||
[[java-rest-high-document-delete-by-query-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Delete By Query Request
|
||||
|
||||
A `DeleteByQueryRequest` can be used to delete documents from an index. It requires an existing index (or a set of indices)
|
||||
on which deletion is to be performed.
|
||||
A +{request}+ can be used to delete documents from an index. It requires an
|
||||
existing index (or a set of indices) on which deletion is to be performed.
|
||||
|
||||
The simplest form of a `DeleteByQueryRequest` looks like:
|
||||
The simplest form of a +{request}+ looks like this and deletes all documents
|
||||
in an index:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Creates the `DeleteByQueryRequest` on a set of indices.
|
||||
<1> Creates the +{request}+ on a set of indices.
|
||||
|
||||
By default version conflicts abort the `DeleteByQueryRequest` process but you can just count them by settings it to
|
||||
`proceed` in the request body
|
||||
By default version conflicts abort the +{request}+ process but you can just
|
||||
count them with this:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-conflicts]
|
||||
include-tagged::{doc-tests-file}[{api}-request-conflicts]
|
||||
--------------------------------------------------
|
||||
<1> Set `proceed` on version conflict
|
||||
|
||||
|
@ -28,7 +35,7 @@ You can limit the documents by adding a query.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-query]
|
||||
include-tagged::{doc-tests-file}[{api}-request-query]
|
||||
--------------------------------------------------
|
||||
<1> Only copy documents which have field `user` set to `kimchy`
|
||||
|
||||
|
@ -36,32 +43,33 @@ It’s also possible to limit the number of processed documents by setting size.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-size]
|
||||
include-tagged::{doc-tests-file}[{api}-request-size]
|
||||
--------------------------------------------------
|
||||
<1> Only copy 10 documents
|
||||
|
||||
By default `DeleteByQueryRequest` uses batches of 1000. You can change the batch size with `setBatchSize`.
|
||||
By default +{request}+ uses batches of 1000. You can change the batch size
|
||||
with `setBatchSize`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-scrollSize]
|
||||
include-tagged::{doc-tests-file}[{api}-request-scrollSize]
|
||||
--------------------------------------------------
|
||||
<1> Use batches of 100 documents
|
||||
|
||||
`DeleteByQueryRequest` also helps in automatically parallelizing using `sliced-scroll` to
|
||||
slice on `_uid`. Use `setSlices` to specify the number of slices to use.
|
||||
+{request}+ can also be parallelized using `sliced-scroll` with `setSlices`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-slices]
|
||||
include-tagged::{doc-tests-file}[{api}-request-slices]
|
||||
--------------------------------------------------
|
||||
<1> set number of slices to use
|
||||
|
||||
`DeleteByQueryRequest` uses the `scroll` parameter to control how long it keeps the "search context" alive.
|
||||
+{request}+ uses the `scroll` parameter to control how long it keeps the
|
||||
"search context" alive.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-scroll]
|
||||
include-tagged::{doc-tests-file}[{api}-request-scroll]
|
||||
--------------------------------------------------
|
||||
<1> set scroll time
|
||||
|
||||
|
@ -70,7 +78,7 @@ that routing value.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-routing]
|
||||
include-tagged::{doc-tests-file}[{api}-request-routing]
|
||||
--------------------------------------------------
|
||||
<1> set routing
|
||||
|
||||
|
@ -80,72 +88,33 @@ In addition to the options above the following arguments can optionally be also
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-timeout]
|
||||
include-tagged::{doc-tests-file}[{api}-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for the delete by query request to be performed as a `TimeValue`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-refresh]
|
||||
include-tagged::{doc-tests-file}[{api}-request-refresh]
|
||||
--------------------------------------------------
|
||||
<1> Refresh index after calling delete by query
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-indicesOptions]
|
||||
include-tagged::{doc-tests-file}[{api}-request-indicesOptions]
|
||||
--------------------------------------------------
|
||||
<1> Set indices options
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[[java-rest-high-document-delete-by-query-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-delete-by-query-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of an delete by query request requires both the `DeleteByQueryRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `DeleteByQueryRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `BulkByScrollResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument and contains a list of individual results for each
|
||||
operation that was executed. Note that one or more operations might have
|
||||
failed while the others have been successfully executed.
|
||||
<2> Called when the whole `DeleteByQueryRequest` fails. In this case the raised
|
||||
exception is provided as an argument and no operation has been executed.
|
||||
|
||||
[[java-rest-high-document-delete-by-query-execute-listener-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Delete By Query Response
|
||||
|
||||
The returned `BulkByScrollResponse` contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
The returned +{response}+ contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Get total time taken
|
||||
<2> Check if the request timed out
|
||||
|
|
|
@ -1,18 +1,24 @@
|
|||
[[java-rest-high-document-multi-get]]
|
||||
--
|
||||
:api: multi-get
|
||||
:request: MultiGetRequest
|
||||
:response: MultiGetResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Multi-Get API
|
||||
|
||||
The `multiGet` API executes multiple <<java-rest-high-document-get,`get`>>
|
||||
requests in a single http request in parallel.
|
||||
|
||||
[[java-rest-high-document-mulit-get-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Multi-Get Request
|
||||
|
||||
A `MultiGetRequest` is built empty and you add `MultiGetRequest.Item`s to
|
||||
configure what to fetch:
|
||||
A +{request}+ is built empty and you add `MultiGetRequest.Item`s to configure
|
||||
what to fetch:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Index
|
||||
<2> Type
|
||||
|
@ -27,25 +33,25 @@ You can set most of these on the `Item`:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-no-source]
|
||||
include-tagged::{doc-tests-file}[{api}-request-no-source]
|
||||
--------------------------------------------------
|
||||
<1> Disable source retrieval, enabled by default
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-source-include]
|
||||
include-tagged::{doc-tests-file}[{api}-request-source-include]
|
||||
--------------------------------------------------
|
||||
<1> Configure source inclusion for specific fields
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-source-exclude]
|
||||
include-tagged::{doc-tests-file}[{api}-request-source-exclude]
|
||||
--------------------------------------------------
|
||||
<1> Configure source exclusion for specific fields
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-stored]
|
||||
include-tagged::{doc-tests-file}[{api}-request-stored]
|
||||
--------------------------------------------------
|
||||
<1> Configure retrieval for specific stored fields (requires fields to be
|
||||
stored separately in the mappings)
|
||||
|
@ -54,7 +60,7 @@ separately in the mappings)
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-item-extras]
|
||||
include-tagged::{doc-tests-file}[{api}-request-item-extras]
|
||||
--------------------------------------------------
|
||||
<1> Routing value
|
||||
<2> Version
|
||||
|
@ -68,56 +74,18 @@ not on any items:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-request-top-level-extras]
|
||||
include-tagged::{doc-tests-file}[{api}-request-top-level-extras]
|
||||
--------------------------------------------------
|
||||
<1> Preference value
|
||||
<2> Set realtime flag to `false` (`true` by default)
|
||||
<3> Perform a refresh before retrieving the document (`false` by default)
|
||||
|
||||
[[java-rest-high-document-multi-get-sync]]
|
||||
==== Synchronous Execution
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
After building the `MultiGetRequest` you can execute it synchronously with
|
||||
`multiGet`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-multi-get-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a multi get request requires both the
|
||||
`MultiGetRequest` instance and an `ActionListener` instance to be passed to
|
||||
the asynchronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `MultiGetRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the
|
||||
request completed the `ActionListener` is called back using the `onResponse`
|
||||
method if the execution successfully completed or using the `onFailure` method
|
||||
if it failed.
|
||||
|
||||
A typical listener for `MultiGetResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
||||
|
||||
[[java-rest-high-document-multi-get-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Multi Get Response
|
||||
|
||||
The returned `MultiGetResponse` contains a list of `MultiGetItemResponse`s in
|
||||
The returned +{response}+ contains a list of `MultiGetItemResponse`s in
|
||||
`getResponses` in the same order that they were requested.
|
||||
`MultiGetItemResponse` contains *either* a
|
||||
<<java-rest-high-document-get-response, `GetResponse`>> if the get succeeded
|
||||
|
@ -126,7 +94,7 @@ normal `GetResponse`.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> `getFailure` returns null because there isn't a failure.
|
||||
<2> `getResponse` returns the `GetResponse`.
|
||||
|
@ -143,7 +111,7 @@ When one of the subrequests as performed against an index that does not exist
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-indexnotfound]
|
||||
include-tagged::{doc-tests-file}[{api}-indexnotfound]
|
||||
--------------------------------------------------
|
||||
<1> `getResponse` is null.
|
||||
<2> `getFailure` isn't and contains an `Exception`.
|
||||
|
@ -157,7 +125,7 @@ document has a different version number, a version conflict is raised:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[multi-get-conflict]
|
||||
include-tagged::{doc-tests-file}[{api}-conflict]
|
||||
--------------------------------------------------
|
||||
<1> `getResponse` is null.
|
||||
<2> `getFailure` isn't and contains an `Exception`.
|
||||
|
|
|
@ -1,22 +1,29 @@
|
|||
[[java-rest-high-document-reindex]]
|
||||
--
|
||||
:api: reindex
|
||||
:request: ReindexRequest
|
||||
:response: BulkByScrollResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Reindex API
|
||||
|
||||
[[java-rest-high-document-reindex-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Reindex Request
|
||||
|
||||
A `ReindexRequest` can be used to copy documents from one or more indexes into a destination index.
|
||||
A +{request}+ can be used to copy documents from one or more indexes into a
|
||||
destination index.
|
||||
|
||||
It requires an existing source index and a target index which may or may not exist pre-request. Reindex does not attempt
|
||||
to set up the destination index. It does not copy the settings of the source index. You should set up the destination
|
||||
index prior to running a `_reindex` action, including setting up mappings, shard counts, replicas, etc.
|
||||
|
||||
The simplest form of a `ReindexRequest` looks like follows:
|
||||
The simplest form of a +{request}+ looks like this:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Creates the `ReindexRequest`
|
||||
<1> Creates the +{request}+
|
||||
<2> Adds a list of sources to copy from
|
||||
<3> Adds the destination index
|
||||
|
||||
|
@ -28,7 +35,7 @@ source index.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-versionType]
|
||||
include-tagged::{doc-tests-file}[{api}-request-versionType]
|
||||
--------------------------------------------------
|
||||
<1> Set the versionType to `EXTERNAL`
|
||||
|
||||
|
@ -37,16 +44,16 @@ documents will cause a version conflict. The default `opType` is `index`.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-opType]
|
||||
include-tagged::{doc-tests-file}[{api}-request-opType]
|
||||
--------------------------------------------------
|
||||
<1> Set the opType to `create`
|
||||
|
||||
By default version conflicts abort the `_reindex` process but you can just count them by settings it to `proceed`
|
||||
in the request body
|
||||
By default version conflicts abort the `_reindex` process but you can just count
|
||||
them instead with:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-conflicts]
|
||||
include-tagged::{doc-tests-file}[{api}-request-conflicts]
|
||||
--------------------------------------------------
|
||||
<1> Set `proceed` on version conflict
|
||||
|
||||
|
@ -54,7 +61,7 @@ You can limit the documents by adding a type to the source or by adding a query.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-typeOrQuery]
|
||||
include-tagged::{doc-tests-file}[{api}-request-typeOrQuery]
|
||||
--------------------------------------------------
|
||||
<1> Only copy `doc` type
|
||||
<2> Only copy documents which have field `user` set to `kimchy`
|
||||
|
@ -63,7 +70,7 @@ It’s also possible to limit the number of processed documents by setting size.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-size]
|
||||
include-tagged::{doc-tests-file}[{api}-request-size]
|
||||
--------------------------------------------------
|
||||
<1> Only copy 10 documents
|
||||
|
||||
|
@ -71,7 +78,7 @@ By default `_reindex` uses batches of 1000. You can change the batch size with `
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-sourceSize]
|
||||
include-tagged::{doc-tests-file}[{api}-request-sourceSize]
|
||||
--------------------------------------------------
|
||||
<1> Use batches of 100 documents
|
||||
|
||||
|
@ -79,7 +86,7 @@ Reindex can also use the ingest feature by specifying a `pipeline`.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-pipeline]
|
||||
include-tagged::{doc-tests-file}[{api}-request-pipeline]
|
||||
--------------------------------------------------
|
||||
<1> set pipeline to `my_pipeline`
|
||||
|
||||
|
@ -88,21 +95,21 @@ selective query to size and sort.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-sort]
|
||||
include-tagged::{doc-tests-file}[{api}-request-sort]
|
||||
--------------------------------------------------
|
||||
<1> add descending sort to `field1`
|
||||
<2> add ascending sort to `field2`
|
||||
|
||||
`ReindexRequest` also supports a `script` that modifies the document. It allows you to also change the document's
|
||||
metadata. The following example illustrates that.
|
||||
+{request}+ also supports a `script` that modifies the document. It allows you to
|
||||
also change the document's metadata. The following example illustrates that.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-script]
|
||||
include-tagged::{doc-tests-file}[{api}-request-script]
|
||||
--------------------------------------------------
|
||||
<1> `setScript` to increment the `likes` field on all documents with user `kimchy`.
|
||||
|
||||
`ReindexRequest` supports reindexing from a remote Elasticsearch cluster. When using a remote cluster the query should be
|
||||
+{request}+ supports reindexing from a remote Elasticsearch cluster. When using a remote cluster the query should be
|
||||
specified inside the `RemoteInfo` object and not using `setSourceQuery`. If both the remote info and the source query are
|
||||
set it results in a validation error during the request. The reason for this is that the remote Elasticsearch may not
|
||||
understand queries built by the modern query builders. The remote cluster support works all the way back to Elasticsearch
|
||||
|
@ -111,23 +118,24 @@ in JSON.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-remote]
|
||||
include-tagged::{doc-tests-file}[{api}-request-remote]
|
||||
--------------------------------------------------
|
||||
<1> set remote elastic cluster
|
||||
|
||||
`ReindexRequest` also helps in automatically parallelizing using `sliced-scroll` to
|
||||
+{request}+ also helps in automatically parallelizing using `sliced-scroll` to
|
||||
slice on `_uid`. Use `setSlices` to specify the number of slices to use.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-slices]
|
||||
include-tagged::{doc-tests-file}[{api}-request-slices]
|
||||
--------------------------------------------------
|
||||
<1> set number of slices to use
|
||||
|
||||
`ReindexRequest` uses the `scroll` parameter to control how long it keeps the "search context" alive.
|
||||
+{request}+ uses the `scroll` parameter to control how long it keeps the
|
||||
"search context" alive.
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-scroll]
|
||||
include-tagged::{doc-tests-file}[{api}-request-scroll]
|
||||
--------------------------------------------------
|
||||
<1> set scroll time
|
||||
|
||||
|
@ -137,66 +145,27 @@ In addition to the options above the following arguments can optionally be also
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-timeout]
|
||||
include-tagged::{doc-tests-file}[{api}-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for the reindex request to be performed as a `TimeValue`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-refresh]
|
||||
include-tagged::{doc-tests-file}[{api}-request-refresh]
|
||||
--------------------------------------------------
|
||||
<1> Refresh index after calling reindex
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[[java-rest-high-document-reindex-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-reindex-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a reindex request requires both the `ReindexRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `ReindexRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `BulkByScrollResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument and contains a list of individual results for each
|
||||
operation that was executed. Note that one or more operations might have
|
||||
failed while the others have been successfully executed.
|
||||
<2> Called when the whole `ReindexRequest` fails. In this case the raised
|
||||
exception is provided as an argument and no operation has been executed.
|
||||
|
||||
[[java-rest-high-document-reindex-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Reindex Response
|
||||
|
||||
The returned `BulkByScrollResponse` contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
The returned +{response}+ contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Get total time taken
|
||||
<2> Check if the request timed out
|
||||
|
|
|
@ -1,10 +1,16 @@
|
|||
[[java-rest-high-document-rethrottle]]
|
||||
--
|
||||
:api: rethrottle
|
||||
:request: RethrottleRequest
|
||||
:response: ListTasksResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Rethrottle API
|
||||
|
||||
[[java-rest-high-document-rethrottle-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Rethrottle Request
|
||||
|
||||
A `RethrottleRequest` can be used to change the current throttling on a running
|
||||
A +{request}+ can be used to change the current throttling on a running
|
||||
reindex, update-by-query or delete-by-query task or to disable throttling of
|
||||
the task entirely. It requires the task Id of the task to change.
|
||||
|
||||
|
@ -13,16 +19,16 @@ task using the following:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-disable-request]
|
||||
include-tagged::{doc-tests-file}[{api}-disable-request]
|
||||
--------------------------------------------------
|
||||
<1> Create a `RethrottleRequest` that disables throttling for a specific task id
|
||||
<1> Create a +{request}+ that disables throttling for a specific task id
|
||||
|
||||
By providing a `requestsPerSecond` argument, the request will change the
|
||||
existing task throttling to the specified value:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Request to change the throttling of a task to 100 requests per second
|
||||
|
||||
|
@ -32,22 +38,22 @@ should be rethrottled:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request-execution]
|
||||
include-tagged::{doc-tests-file}[{api}-request-execution]
|
||||
--------------------------------------------------
|
||||
<1> Execute reindex rethrottling request
|
||||
<2> The same for update-by-query
|
||||
<3> The same for delete-by-query
|
||||
|
||||
[[java-rest-high-document-rethrottle-async]]
|
||||
[id="{upid}-{api}-async"]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a rethrottle request requires both the `RethrottleRequest`
|
||||
The asynchronous execution of a rethrottle request requires both the +{request}+
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-execute-async]
|
||||
include-tagged::{doc-tests-file}[{api}-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> Execute reindex rethrottling asynchronously
|
||||
<2> The same for update-by-query
|
||||
|
@ -60,14 +66,14 @@ it failed. A typical listener looks like this:
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[rethrottle-request-async-listener]
|
||||
include-tagged::{doc-tests-file}[{api}-request-async-listener]
|
||||
--------------------------------------------------
|
||||
<1> Code executed when the request is successfully completed
|
||||
<2> Code executed when the request fails with an exception
|
||||
|
||||
[[java-rest-high-document-retrottle-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Rethrottle Response
|
||||
|
||||
Rethrottling returns the task that has been rethrottled in the form of a
|
||||
`ListTasksResponse`. The structure of this response object is described in detail
|
||||
Rethrottling returns the task that has been rethrottled in the form of a
|
||||
+{response}+. The structure of this response object is described in detail
|
||||
in <<java-rest-high-cluster-list-tasks-response,this section>>.
|
||||
|
|
|
@ -1,27 +1,34 @@
|
|||
[[java-rest-high-document-update-by-query]]
|
||||
--
|
||||
:api: update-by-query
|
||||
:request: UpdateByQueryRequest
|
||||
:response: BulkByScrollResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Update By Query API
|
||||
|
||||
[[java-rest-high-document-update-by-query-request]]
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Update By Query Request
|
||||
|
||||
A `UpdateByQueryRequest` can be used to update documents in an index.
|
||||
A +{request}+ can be used to update documents in an index.
|
||||
|
||||
It requires an existing index (or a set of indices) on which the update is to be performed.
|
||||
It requires an existing index (or a set of indices) on which the update is to
|
||||
be performed.
|
||||
|
||||
The simplest form of a `UpdateByQueryRequest` looks like follows:
|
||||
The simplest form of a +{request}+ looks like this:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request]
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> Creates the `UpdateByQueryRequest` on a set of indices.
|
||||
<1> Creates the +{request}+ on a set of indices.
|
||||
|
||||
By default version conflicts abort the `UpdateByQueryRequest` process but you can just count them by settings it to
|
||||
`proceed` in the request body
|
||||
By default version conflicts abort the +{request}+ process but you can just
|
||||
count them instead with:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-conflicts]
|
||||
include-tagged::{doc-tests-file}[{api}-request-conflicts]
|
||||
--------------------------------------------------
|
||||
<1> Set `proceed` on version conflict
|
||||
|
||||
|
@ -29,7 +36,7 @@ You can limit the documents by adding a query.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-query]
|
||||
include-tagged::{doc-tests-file}[{api}-request-query]
|
||||
--------------------------------------------------
|
||||
<1> Only copy documents which have field `user` set to `kimchy`
|
||||
|
||||
|
@ -37,15 +44,16 @@ It’s also possible to limit the number of processed documents by setting size.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-size]
|
||||
include-tagged::{doc-tests-file}[{api}-request-size]
|
||||
--------------------------------------------------
|
||||
<1> Only copy 10 documents
|
||||
|
||||
By default `UpdateByQueryRequest` uses batches of 1000. You can change the batch size with `setBatchSize`.
|
||||
By default +{request}+ uses batches of 1000. You can change the batch size with
|
||||
`setBatchSize`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-scrollSize]
|
||||
include-tagged::{doc-tests-file}[{api}-request-scrollSize]
|
||||
--------------------------------------------------
|
||||
<1> Use batches of 100 documents
|
||||
|
||||
|
@ -53,24 +61,23 @@ Update by query can also use the ingest feature by specifying a `pipeline`.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-pipeline]
|
||||
include-tagged::{doc-tests-file}[{api}-request-pipeline]
|
||||
--------------------------------------------------
|
||||
<1> set pipeline to `my_pipeline`
|
||||
|
||||
`UpdateByQueryRequest` also supports a `script` that modifies the document. The following example illustrates that.
|
||||
+{request}+ also supports a `script` that modifies the document:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-script]
|
||||
include-tagged::{doc-tests-file}[{api}-request-script]
|
||||
--------------------------------------------------
|
||||
<1> `setScript` to increment the `likes` field on all documents with user `kimchy`.
|
||||
|
||||
`UpdateByQueryRequest` also helps in automatically parallelizing using `sliced-scroll` to
|
||||
slice on `_uid`. Use `setSlices` to specify the number of slices to use.
|
||||
+{request}+ can be parallelized using `sliced-scroll` with `setSlices`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-slices]
|
||||
include-tagged::{doc-tests-file}[{api}-request-slices]
|
||||
--------------------------------------------------
|
||||
<1> set number of slices to use
|
||||
|
||||
|
@ -78,7 +85,7 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-sli
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-scroll]
|
||||
include-tagged::{doc-tests-file}[{api}-request-scroll]
|
||||
--------------------------------------------------
|
||||
<1> set scroll time
|
||||
|
||||
|
@ -87,7 +94,7 @@ that routing value.
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-routing]
|
||||
include-tagged::{doc-tests-file}[{api}-request-routing]
|
||||
--------------------------------------------------
|
||||
<1> set routing
|
||||
|
||||
|
@ -97,72 +104,33 @@ In addition to the options above the following arguments can optionally be also
|
|||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-timeout]
|
||||
include-tagged::{doc-tests-file}[{api}-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for the update by query request to be performed as a `TimeValue`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-refresh]
|
||||
include-tagged::{doc-tests-file}[{api}-request-refresh]
|
||||
--------------------------------------------------
|
||||
<1> Refresh index after calling update by query
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-indicesOptions]
|
||||
include-tagged::{doc-tests-file}[{api}-request-indicesOptions]
|
||||
--------------------------------------------------
|
||||
<1> Set indices options
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[[java-rest-high-document-update-by-query-sync]]
|
||||
==== Synchronous Execution
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-document-update-by-query-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of an update by query request requires both the `UpdateByQueryRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `UpdateByQueryRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `BulkByScrollResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument and contains a list of individual results for each
|
||||
operation that was executed. Note that one or more operations might have
|
||||
failed while the others have been successfully executed.
|
||||
<2> Called when the whole `UpdateByQueryRequest` fails. In this case the raised
|
||||
exception is provided as an argument and no operation has been executed.
|
||||
|
||||
[[java-rest-high-document-update-by-query-execute-listener-response]]
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Update By Query Response
|
||||
|
||||
The returned `BulkByScrollResponse` contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
The returned +{response}+ contains information about the executed operations and
|
||||
allows to iterate over each result as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-response]
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Get total time taken
|
||||
<2> Check if the request timed out
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
--
|
||||
:api: update-datafeed
|
||||
:request: UpdateDatafeedRequest
|
||||
:response: PutDatafeedResponse
|
||||
--
|
||||
[id="{upid}-{api}"]
|
||||
=== Update Datafeed API
|
||||
|
||||
The Update Datafeed API can be used to update a {ml} datafeed
|
||||
in the cluster. The API accepts a +{request}+ object
|
||||
as a request and returns a +{response}+.
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Update Datafeed Request
|
||||
|
||||
A +{request}+ requires the following argument:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> The updated configuration of the {ml} datafeed
|
||||
|
||||
[id="{upid}-{api}-config"]
|
||||
==== Updated Datafeed Arguments
|
||||
|
||||
A `DatafeedUpdate` requires an existing non-null `datafeedId` and
|
||||
allows updating various settings.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-config]
|
||||
--------------------------------------------------
|
||||
<1> Mandatory, non-null `datafeedId` referencing an existing {ml} datafeed
|
||||
<2> Optional, set the datafeed Aggregations for data gathering
|
||||
<3> Optional, the indices that contain the data to retrieve and feed into the job
|
||||
<4> Optional, specifies how data searches are split into time chunks.
|
||||
<5> Optional, the interval at which scheduled queries are made while the datafeed runs in real time.
|
||||
<6> Optional, a query to filter the search results by. Defaults to the `match_all` query.
|
||||
<7> Optional, the time interval behind real time that data is queried.
|
||||
<8> Optional, allows the use of script fields.
|
||||
<9> Optional, the `size` parameter used in the searches.
|
||||
<10> Optional, the `jobId` that references the job that the datafeed should be associated with
|
||||
after the update.
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ returns the full representation of
|
||||
the updated {ml} datafeed if it has been successfully updated.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> The updated datafeed
|
|
@ -0,0 +1,34 @@
|
|||
--
|
||||
:api: rollup-start-job
|
||||
:request: StartRollupJobRequest
|
||||
:response: StartRollupJobResponse
|
||||
--
|
||||
|
||||
[id="{upid}-{api}"]
|
||||
=== Start Rollup Job API
|
||||
|
||||
[id="{upid}-{api}-request"]
|
||||
==== Request
|
||||
|
||||
The Start Rollup Job API allows you to start a job by ID.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-request]
|
||||
--------------------------------------------------
|
||||
<1> The ID of the job to start.
|
||||
|
||||
[id="{upid}-{api}-response"]
|
||||
==== Response
|
||||
|
||||
The returned +{response}+ indicates if the start command was received.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests-file}[{api}-response]
|
||||
--------------------------------------------------
|
||||
<1> Whether or not the start job request was received.
|
||||
|
||||
include::../execution.asciidoc[]
|
||||
|
||||
|
|
@ -0,0 +1,85 @@
|
|||
[[java-rest-high-security-create-token]]
|
||||
=== Create Token API
|
||||
|
||||
[[java-rest-high-security-create-token-request]]
|
||||
==== Request
|
||||
The `CreateTokenRequest` supports three different OAuth2 _grant types_:
|
||||
|
||||
===== Password Grants
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-password-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== Refresh Token Grants
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-refresh-request]
|
||||
--------------------------------------------------
|
||||
|
||||
===== Client Credential Grants
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-client-credentials-request]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-security-create-token-execution]]
|
||||
==== Execution
|
||||
|
||||
Creating an OAuth2 security token can be performed by passing the appropriate request to the
|
||||
`security().createToken()` method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-security-create-token-response]]
|
||||
==== Response
|
||||
|
||||
The returned `CreateTokenResponse` contains the following properties:
|
||||
|
||||
`accessToken`:: This is the newly created access token.
|
||||
It can be used to authenticate to the Elasticsearch cluster.
|
||||
`type`:: The type of the token, this is always `"Bearer"`.
|
||||
`expiresIn`:: The length of time until the token will expire.
|
||||
The token will be considered invalid after that time.
|
||||
`scope`:: The scope of the token. May be `null`.
|
||||
`refreshToken`:: A secondary "refresh" token that may be used to extend
|
||||
the life of an access token. May be `null`.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-response]
|
||||
--------------------------------------------------
|
||||
<1> The `accessToken` can be used to authenticate to Elasticsearch.
|
||||
<2> The `refreshToken` can be used to create a new `CreateTokenRequest` with a `refresh_token` grant.
|
||||
|
||||
[[java-rest-high-security-create-token-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously using the `security().createTokenAsync()`
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `CreateTokenRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
has completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for a `CreateTokenResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[create-token-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
|
@ -0,0 +1,67 @@
|
|||
[[java-rest-high-security-get-role-mappings]]
|
||||
=== Get Role Mappings API
|
||||
|
||||
[[java-rest-high-security-get-role-mappings-execution]]
|
||||
==== Execution
|
||||
|
||||
Retrieving a role mapping can be performed using the `security().getRoleMappings()`
|
||||
method and by setting role mapping name on `GetRoleMappingsRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
Retrieving multiple role mappings can be performed using the `security().getRoleMappings()`
|
||||
method and by setting role mapping names on `GetRoleMappingsRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-list-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
Retrieving all role mappings can be performed using the `security().getRoleMappings()`
|
||||
method and with no role mapping name on `GetRoleMappingsRequest`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-all-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-security-get-role-mappings-response]]
|
||||
==== Response
|
||||
|
||||
The returned `GetRoleMappingsResponse` contains the list of role mapping(s).
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-response]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-security-get-role-mappings-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously using the `security().getRoleMappingsAsync()`
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `GetRoleMappingsRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
has completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for a `GetRoleMappingsResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[get-role-mappings-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
|
@ -240,6 +240,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
|
|||
* <<{upid}-update-job>>
|
||||
* <<{upid}-get-job-stats>>
|
||||
* <<{upid}-put-datafeed>>
|
||||
* <<{upid}-update-datafeed>>
|
||||
* <<{upid}-get-datafeed>>
|
||||
* <<{upid}-delete-datafeed>>
|
||||
* <<{upid}-preview-datafeed>>
|
||||
|
@ -266,6 +267,7 @@ include::ml/close-job.asciidoc[]
|
|||
include::ml/update-job.asciidoc[]
|
||||
include::ml/flush-job.asciidoc[]
|
||||
include::ml/put-datafeed.asciidoc[]
|
||||
include::ml/update-datafeed.asciidoc[]
|
||||
include::ml/get-datafeed.asciidoc[]
|
||||
include::ml/delete-datafeed.asciidoc[]
|
||||
include::ml/preview-datafeed.asciidoc[]
|
||||
|
@ -301,11 +303,13 @@ include::migration/get-assistance.asciidoc[]
|
|||
The Java High Level REST Client supports the following Rollup APIs:
|
||||
|
||||
* <<java-rest-high-x-pack-rollup-put-job>>
|
||||
* <<{upid}-rollup-start-job>>
|
||||
* <<{upid}-rollup-delete-job>>
|
||||
* <<java-rest-high-x-pack-rollup-get-job>>
|
||||
* <<{upid}-x-pack-rollup-get-rollup-caps>>
|
||||
|
||||
include::rollup/put_job.asciidoc[]
|
||||
include::rollup/start_job.asciidoc[]
|
||||
include::rollup/delete_job.asciidoc[]
|
||||
include::rollup/get_job.asciidoc[]
|
||||
include::rollup/get_rollup_caps.asciidoc[]
|
||||
|
@ -325,7 +329,9 @@ The Java High Level REST Client supports the following Security APIs:
|
|||
* <<{upid}-clear-roles-cache>>
|
||||
* <<java-rest-high-security-get-certificates>>
|
||||
* <<java-rest-high-security-put-role-mapping>>
|
||||
* <<java-rest-high-security-get-role-mappings>>
|
||||
* <<java-rest-high-security-delete-role-mapping>>
|
||||
* <<java-rest-high-security-create-token>>
|
||||
|
||||
include::security/put-user.asciidoc[]
|
||||
include::security/enable-user.asciidoc[]
|
||||
|
@ -335,7 +341,9 @@ include::security/delete-role.asciidoc[]
|
|||
include::security/clear-roles-cache.asciidoc[]
|
||||
include::security/get-certificates.asciidoc[]
|
||||
include::security/put-role-mapping.asciidoc[]
|
||||
include::security/get-role-mappings.asciidoc[]
|
||||
include::security/delete-role-mapping.asciidoc[]
|
||||
include::security/create-token.asciidoc[]
|
||||
|
||||
== Watcher APIs
|
||||
|
||||
|
|
|
@ -1,46 +0,0 @@
|
|||
[role="xpack"]
|
||||
[testenv="platinum"]
|
||||
[[ccr-get-auto-follow-stats]]
|
||||
=== Get Auto-Follow Stats API
|
||||
++++
|
||||
<titleabbrev>Get Auto-Follow Stats</titleabbrev>
|
||||
++++
|
||||
|
||||
Get auto-follow stats.
|
||||
|
||||
==== Description
|
||||
|
||||
This API gets stats about auto-follow patterns.
|
||||
|
||||
==== Request
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET /_ccr/auto_follow/stats
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST
|
||||
|
||||
==== Example
|
||||
|
||||
This example retrieves stats about auto-follow patterns:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET /_ccr/auto_follow/stats
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST
|
||||
|
||||
The API returns the following result:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"number_of_successful_follow_indices" : 16,
|
||||
"number_of_failed_follow_indices" : 0,
|
||||
"number_of_failed_remote_cluster_state_requests" : 0,
|
||||
"recent_auto_follow_errors" : [ ]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"number_of_successful_follow_indices" : 16/"number_of_successful_follow_indices" : $body.number_of_successful_follow_indices/]
|
|
@ -5,6 +5,12 @@
|
|||
|
||||
You can use the following APIs to perform {ccr} operations.
|
||||
|
||||
[float]
|
||||
[[ccr-api-top-level]]
|
||||
=== Top-Level
|
||||
|
||||
* <<ccr-get-stats,Get {ccr} stats>>
|
||||
|
||||
[float]
|
||||
[[ccr-api-follow]]
|
||||
=== Follow
|
||||
|
@ -22,7 +28,9 @@ You can use the following APIs to perform {ccr} operations.
|
|||
* <<ccr-put-auto-follow-pattern,Create auto-follow pattern>>
|
||||
* <<ccr-delete-auto-follow-pattern,Delete auto-follow pattern>>
|
||||
* <<ccr-get-auto-follow-pattern,Get auto-follow patterns>>
|
||||
* <<ccr-get-auto-follow-stats,Get stats about auto-follow patterns>>
|
||||
|
||||
// top-level
|
||||
include::get-ccr-stats.asciidoc[]
|
||||
|
||||
// follow
|
||||
include::follow/put-follow.asciidoc[]
|
||||
|
@ -35,4 +43,3 @@ include::follow/get-follow-stats.asciidoc[]
|
|||
include::auto-follow/put-auto-follow-pattern.asciidoc[]
|
||||
include::auto-follow/delete-auto-follow-pattern.asciidoc[]
|
||||
include::auto-follow/get-auto-follow-pattern.asciidoc[]
|
||||
include::auto-follow/get-auto-follow-stats.asciidoc[]
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue