Merge branch master into zen2

There were 2 types of conflicts during the merge:
1) Line length fix
2) Classes no longer extend AbstractComponent
This commit is contained in:
Andrey Ershov 2018-11-21 15:12:58 +01:00
commit 6ac0cb1842
669 changed files with 17429 additions and 5857 deletions

View File

@ -150,8 +150,9 @@ Default value provided below in [brackets].
=== Load balancing and caches.
By default the tests run on up to 4 JVMs based on the number of cores. If you
want to explicitly specify the number of JVMs you can do so on the command
By default the tests run on multiple processes using all the available cores on all
available CPUs, not including hyper-threading.
If you want to explicitly specify the number of JVMs you can do so on the command
line:
----------------------------

View File

@ -25,6 +25,9 @@ import org.elasticsearch.gradle.VersionCollection
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import java.util.function.Predicate
plugins {
id 'com.gradle.build-scan' version '1.13.2'
@ -622,3 +625,19 @@ allprojects {
}
}
}
allprojects {
// Aggregate tasks that partition every project's "check" task into two
// halves — presumably so CI can run the two partitions on separate
// workers. TODO confirm against the CI job definitions.
task checkPart1
task checkPart2
// Wire each task named "check" into a partition as it is created:
// x-pack projects go to part 2, everything else to part 1.
tasks.matching { it.name == "check" }.all { check ->
if (check.path.startsWith(":x-pack:")) {
checkPart2.dependsOn check
} else {
checkPart1.dependsOn check
}
}
}

View File

@ -773,9 +773,32 @@ class BuildPlugin implements Plugin<Project> {
}
static void applyCommonTestConfig(Project project) {
String defaultParallel = 'auto'
// Count physical cores on any Linux distro ( don't count hyper-threading )
if (project.file("/proc/cpuinfo").exists()) {
Map<String, Integer> socketToCore = [:]
String currentID = ""
project.file("/proc/cpuinfo").readLines().forEach({ line ->
if (line.contains(":")) {
List<String> parts = line.split(":", 2).collect({it.trim()})
String name = parts[0], value = parts[1]
// the ID of the CPU socket
if (name == "physical id") {
currentID = value
}
// Number of cores not including hyper-threading
if (name == "cpu cores") {
assert currentID.isEmpty() == false
socketToCore[currentID] = Integer.valueOf(value)
currentID = ""
}
}
})
defaultParallel = socketToCore.values().sum().toString();
}
project.tasks.withType(RandomizedTestingTask) {
jvm "${project.runtimeJavaHome}/bin/java"
parallelism System.getProperty('tests.jvms', 'auto')
parallelism System.getProperty('tests.jvms', defaultParallel)
ifNoTests System.getProperty('tests.ifNoTests', 'fail')
onNonEmptyWorkDirectory 'wipe'
leaveTemporary true

View File

@ -24,7 +24,9 @@ import org.apache.rat.license.SimpleLicenseFamily
import org.elasticsearch.gradle.AntTask
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.SkipWhenEmpty
import org.gradle.api.tasks.SourceSet
import java.nio.file.Files
@ -39,12 +41,6 @@ public class LicenseHeadersTask extends AntTask {
@OutputFile
File reportFile = new File(project.buildDir, 'reports/licenseHeaders/rat.log')
/**
* The list of java files to check. protected so the afterEvaluate closure in the
* constructor can write to it.
*/
protected List<FileCollection> javaFiles
/** Allowed license families for this project. */
@Input
List<String> approvedLicenses = ['Apache', 'Generated']
@ -65,11 +61,16 @@ public class LicenseHeadersTask extends AntTask {
LicenseHeadersTask() {
description = "Checks sources for missing, incorrect, or unacceptable license headers"
// Delay resolving the dependencies until after evaluation so we pick up generated sources
project.afterEvaluate {
javaFiles = project.sourceSets.collect({it.allJava})
inputs.files(javaFiles)
}
}
/**
* The list of java files to check, resolved from the project's source sets
* anew on every read (rather than cached in a field), so that sources
* registered after evaluation — e.g. generated sources — are picked up.
* Marked as task inputs; the task is skipped when there are none.
*/
@InputFiles
@SkipWhenEmpty
public List<FileCollection> getJavaFiles() {
return project.sourceSets.collect({it.allJava})
}
/**
@ -97,9 +98,8 @@ public class LicenseHeadersTask extends AntTask {
Files.deleteIfExists(reportFile.toPath())
// run rat, going to the file
List<FileCollection> input = javaFiles
ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) {
for (FileCollection dirSet : input) {
for (FileCollection dirSet : javaFiles) {
for (File dir: dirSet.srcDirs) {
// sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main...
if (dir.exists()) {

View File

@ -69,7 +69,7 @@ class ClusterConfiguration {
*/
@Input
Closure<Integer> minimumMasterNodes = {
if (bwcVersion != null && bwcVersion.before("6.5.0-SNAPSHOT")) {
if (bwcVersion != null && bwcVersion.before("6.5.0")) {
return numNodes > 1 ? numNodes : -1
} else {
return numNodes > 1 ? numNodes.intdiv(2) + 1 : -1

View File

@ -127,7 +127,7 @@ class ClusterFormationTasks {
nodes.add(node)
Closure<Map> writeConfigSetup
Object dependsOn
if (node.nodeVersion.onOrAfter("6.5.0-SNAPSHOT")) {
if (node.nodeVersion.onOrAfter("6.5.0")) {
writeConfigSetup = { Map esConfig ->
// Don't force discovery provider if one is set by the test cluster specs already
if (esConfig.containsKey('discovery.zen.hosts_provider') == false) {
@ -140,7 +140,7 @@ class ClusterFormationTasks {
} else {
dependsOn = startTasks.empty ? startDependencies : startTasks.get(0)
writeConfigSetup = { Map esConfig ->
String unicastTransportUri = node.config.unicastTransportUri(nodes.get(0), node, project.ant)
String unicastTransportUri = node.config.unicastTransportUri(nodes.get(0), node, project.createAntBuilder())
if (unicastTransportUri == null) {
esConfig['discovery.zen.ping.unicast.hosts'] = []
} else {
@ -717,7 +717,7 @@ class ClusterFormationTasks {
Collection<String> unicastHosts = new HashSet<>()
nodes.forEach { node ->
unicastHosts.addAll(node.config.otherUnicastHostAddresses.call())
String unicastHost = node.config.unicastTransportUri(node, null, project.ant)
String unicastHost = node.config.unicastTransportUri(node, null, project.createAntBuilder())
if (unicastHost != null) {
unicastHosts.add(unicastHost)
}
@ -913,9 +913,10 @@ class ClusterFormationTasks {
outputPrintStream: outputStream,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
project.ant.project.addBuildListener(listener)
Object retVal = command(project.ant)
project.ant.project.removeBuildListener(listener)
AntBuilder ant = project.createAntBuilder()
ant.project.addBuildListener(listener)
Object retVal = command(ant)
ant.project.removeBuildListener(listener)
return retVal
}

View File

@ -45,94 +45,6 @@
<!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when the
files start to pass. -->
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]ClusterHealthRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]TransportClusterHealthAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]hotthreads[/\\]NodesHotThreadsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]stats[/\\]NodesStatsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]DeleteRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]TransportDeleteRepositoryAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]GetRepositoriesRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]TransportGetRepositoriesAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]put[/\\]PutRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]put[/\\]TransportPutRepositoryAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]verify[/\\]TransportVerifyRepositoryAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]verify[/\\]VerifyRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]reroute[/\\]TransportClusterRerouteAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]settings[/\\]ClusterUpdateSettingsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]shards[/\\]ClusterSearchShardsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]shards[/\\]TransportClusterSearchShardsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]create[/\\]CreateSnapshotRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]create[/\\]TransportCreateSnapshotAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]delete[/\\]DeleteSnapshotRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]delete[/\\]TransportDeleteSnapshotAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]get[/\\]GetSnapshotsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]restore[/\\]RestoreSnapshotRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]restore[/\\]TransportRestoreSnapshotAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]status[/\\]SnapshotsStatusRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]status[/\\]TransportSnapshotsStatusAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]ClusterStateRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]TransportClusterStateAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]ClusterStatsNodeResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]ClusterStatsRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]TransportClusterStatsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]TransportPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]MultiSearchRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchPhaseController.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsResponse.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsWriter.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TransportMultiTermVectorsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TransportShardMultiTermsVectorAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TransportTermVectorsAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]TransportUpdateAction.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLog.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisRegistry.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]CustomAnalyzerProvider.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]ShingleTokenFilterFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]cache[/\\]bitset[/\\]BitsetFilterCache.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]codec[/\\]PerFieldMappingPostingFormatCodec.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]ElasticsearchConcurrentMergeScheduler.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]Engine.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngine.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]LiveVersionMap.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]get[/\\]ShardGetService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQuery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexEventListener.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexSearcherWrapper.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShard.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexingStats.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardPath.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardStateMetaData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]StoreRecovery.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]Store.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]termvectors[/\\]TermVectorsService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]BaseTranslogReader.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]Translog.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]TranslogReader.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]TranslogSnapshot.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]TranslogWriter.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndexingMemoryController.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]PreBuiltCacheFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]breaker[/\\]HierarchyCircuitBreakerService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]ShardsSyncedFlushResult.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]SyncedFlushService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]PeerRecoverySourceService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoveryFailedException.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoverySettings.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoveryState.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStore.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]TransportNodesListShardStoreMetaData.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]GcNames.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]HotThreads.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
@ -183,83 +95,14 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotsService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]VersionTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]RejectionActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]HotThreadsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]ClusterHealthResponsesTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]RepositoryBlocksIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]settings[/\\]SettingsUpdaterTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]SnapshotBlocksIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]state[/\\]ClusterStateRequestTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]ClusterStatsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchRequestBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]WaitActiveShardCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeActionTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]BroadcastReplicationTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationActionTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]AbstractTermVectorsTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]GetTermVectorsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsUnitTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]EnvironmentTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]NodeEnvironmentTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLogTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicySettingsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLogTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PreBuiltAnalyzerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngineMergeIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngineTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoolQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]BoostingQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]GeoDistanceQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MoreLikeThisQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MultiMatchQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanNotQueryBuilderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]functionscore[/\\]FunctionScoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]GeoUtilsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]AbstractNumberNestedSortingTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]DoubleNestedSortingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]FloatNestedSortingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]LongNestedSortingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]NestedSortingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShardIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShardTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardPathTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]similarity[/\\]SimilarityTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]CorruptedFileIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]CorruptedTranslogIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]StoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]translog[/\\]TranslogTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexing[/\\]IndexActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexlifecycle[/\\]IndexLifecycleActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndexingMemoryControllerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesLifecycleListenerIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesOptionsIntegrationIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]AnalyzeActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]exists[/\\]types[/\\]TypesExistsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]FlushIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]SyncedFlushSingleNodeTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]SyncedFlushUtil.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]ConcurrentDynamicTemplateIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]SimpleGetFieldMappingsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]SimpleGetMappingsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]mapping[/\\]UpdateMappingIntegrationIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]memory[/\\]breaker[/\\]CircuitBreakerUnitTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]memory[/\\]breaker[/\\]RandomExceptionCircuitBreakerIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]IndexPrimaryRelocationIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]IndexRecoveryIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoverySourceHandlerTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoveryStatusTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]settings[/\\]UpdateNumberOfReplicasIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]state[/\\]OpenCloseIndexIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]state[/\\]RareClusterStateIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]stats[/\\]IndexStatsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStoreIntegrationIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStoreTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmGcMonitorServiceSettingsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]FullRollingRestartIT.java" checks="LineLength" />
@ -309,17 +152,7 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]ContextCompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]DedicatedClusterSnapshotRestoreIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RepositoriesIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SharedClusterSnapshotRestoreIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotUtilsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]geo[/\\]RandomShapeGenerator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]hamcrest[/\\]ElasticsearchGeoAssertions.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]validate[/\\]SimpleValidateQueryIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]versioning[/\\]SimpleVersioningIT.java" checks="LineLength" />
<!-- Temporarily contains extra-long lines as examples for tests to be written, see https://github.com/elastic/elasticsearch/issues/34829 -->
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
<suppress files="modules[/\\]reindex[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]reindex[/\\]TransportUpdateByQueryAction.java" checks="LineLength" />
</suppressions>

View File

@ -23,7 +23,6 @@ import org.apache.commons.io.FileUtils;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
@ -39,7 +38,6 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Ignore
public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
private static List<File> EXAMPLE_PLUGINS = Collections.unmodifiableList(

View File

@ -23,6 +23,8 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
import org.elasticsearch.client.ccr.PutFollowResponse;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import java.io.IOException;
@ -89,7 +91,7 @@ public final class CcrClient {
}
/**
* Instructs a follower index the pause the following of a leader index.
* Instructs a follower index to pause the following of a leader index.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-pause-follow.html">
* the docs</a> for more.
@ -110,7 +112,7 @@ public final class CcrClient {
}
/**
* Asynchronously instruct a follower index the pause the following of a leader index.
* Asynchronously instruct a follower index to pause the following of a leader index.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-pause-follow.html">
* the docs</a> for more.
@ -130,4 +132,91 @@ public final class CcrClient {
Collections.emptySet());
}
/**
* Instructs a follower index to resume the following of a leader index.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-resume-follow.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse resumeFollow(ResumeFollowRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
request,
CcrRequestConverters::resumeFollow,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet()
);
}
/**
* Asynchronously instruct a follower index to resume the following of a leader index.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-resume-follow.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
*/
public void resumeFollowAsync(ResumeFollowRequest request,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(
request,
CcrRequestConverters::resumeFollow,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Instructs a follower index to unfollow and become a regular index.
* Note that index following needs to be paused and the follower index needs to be closed.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-unfollow.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse unfollow(UnfollowRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
request,
CcrRequestConverters::unfollow,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet()
);
}
/**
* Asynchronously instructs a follower index to unfollow and become a regular index.
* Note that index following needs to be paused and the follower index needs to be closed.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-unfollow.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
*/
public void unfollowAsync(UnfollowRequest request,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(
request,
CcrRequestConverters::unfollow,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet()
);
}
}

View File

@ -23,6 +23,8 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import java.io.IOException;
@ -49,4 +51,22 @@ final class CcrRequestConverters {
return new Request(HttpPost.METHOD_NAME, endpoint);
}
static Request resumeFollow(ResumeFollowRequest resumeFollowRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(resumeFollowRequest.getFollowerIndex())
.addPathPartAsIs("_ccr", "resume_follow")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(resumeFollowRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request unfollow(UnfollowRequest unfollowRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(unfollowRequest.getFollowerIndex())
.addPathPartAsIs("_ccr", "unfollow")
.build();
return new Request(HttpPost.METHOD_NAME, endpoint);
}
}

View File

@ -47,8 +47,8 @@ public class IndexLifecycleClient {
}
/**
* Retrieve one or more lifecycle policy definition
* See <a href="https://fix-me-when-we-have-docs.com">
* Retrieve one or more lifecycle policy definition. See
* <a href="https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/java-rest-high-ilm-ilm-get-lifecycle-policy.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -62,8 +62,8 @@ public class IndexLifecycleClient {
}
/**
* Asynchronously retrieve one or more lifecycle policy definition
* See <a href="https://fix-me-when-we-have-docs.com">
* Asynchronously retrieve one or more lifecycle policy definition. See
* <a href="https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/java-rest-high-ilm-ilm-get-lifecycle-policy.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -76,7 +76,7 @@ public class IndexLifecycleClient {
}
/**
* Create or modify a lifecycle definition See <a href=
* Create or modify a lifecycle definition. See <a href=
* "https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/java-rest-high-ilm-ilm-put-lifecycle-policy.html">
* the docs</a> for more.
* @param request the request
@ -91,8 +91,8 @@ public class IndexLifecycleClient {
}
/**
* Asynchronously create or modify a lifecycle definition
* See <a href="https://fix-me-when-we-have-docs.com">
* Asynchronously create or modify a lifecycle definition. See <a href=
* "https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/java-rest-high-ilm-ilm-put-lifecycle-policy.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
@ -282,7 +282,7 @@ public class IndexLifecycleClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse retryLifecycleStep(RetryLifecyclePolicyRequest request, RequestOptions options) throws IOException {
public AcknowledgedResponse retryLifecyclePolicy(RetryLifecyclePolicyRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::retryLifecycle, options,
AcknowledgedResponse::fromXContent, emptySet());
}
@ -295,8 +295,8 @@ public class IndexLifecycleClient {
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void retryLifecycleStepAsync(RetryLifecyclePolicyRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
public void retryLifecyclePolicyAsync(RetryLifecyclePolicyRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, IndexLifecycleRequestConverters::retryLifecycle, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
}

View File

@ -28,13 +28,17 @@ import org.apache.http.entity.ByteArrayEntity;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
@ -47,17 +51,21 @@ import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@ -335,6 +343,18 @@ final class MLRequestConverters {
return request;
}
static Request deleteModelSnapshot(DeleteModelSnapshotRequest deleteModelSnapshotRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteModelSnapshotRequest.getJobId())
.addPathPartAsIs("model_snapshots")
.addPathPart(deleteModelSnapshotRequest.getSnapshotId())
.build();
return new Request(HttpDelete.METHOD_NAME, endpoint);
}
static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@ -376,6 +396,36 @@ final class MLRequestConverters {
return request;
}
static Request updateModelSnapshot(UpdateModelSnapshotRequest updateModelSnapshotRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(updateModelSnapshotRequest.getJobId())
.addPathPartAsIs("model_snapshots")
.addPathPart(updateModelSnapshotRequest.getSnapshotId())
.addPathPartAsIs("_update")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(updateModelSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request revertModelSnapshot(RevertModelSnapshotRequest revertModelSnapshotsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(revertModelSnapshotsRequest.getJobId())
.addPathPartAsIs("model_snapshots")
.addPathPart(revertModelSnapshotsRequest.getSnapshotId())
.addPathPart("_revert")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(revertModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@ -471,6 +521,30 @@ final class MLRequestConverters {
return request;
}
static Request putCalendarJob(PutCalendarJobRequest putCalendarJobRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("calendars")
.addPathPart(putCalendarJobRequest.getCalendarId())
.addPathPartAsIs("jobs")
.addPathPart(Strings.collectionToCommaDelimitedString(putCalendarJobRequest.getJobIds()))
.build();
return new Request(HttpPut.METHOD_NAME, endpoint);
}
static Request deleteCalendarJob(DeleteCalendarJobRequest deleteCalendarJobRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("calendars")
.addPathPart(deleteCalendarJobRequest.getCalendarId())
.addPathPartAsIs("jobs")
.addPathPart(Strings.collectionToCommaDelimitedString(deleteCalendarJobRequest.getJobIds()))
.build();
return new Request(HttpDelete.METHOD_NAME, endpoint);
}
static Request deleteCalendar(DeleteCalendarRequest deleteCalendarRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@ -482,6 +556,34 @@ final class MLRequestConverters {
return request;
}
static Request getCalendarEvents(GetCalendarEventsRequest getCalendarEventsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("calendars")
.addPathPart(getCalendarEventsRequest.getCalendarId())
.addPathPartAsIs("events")
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
request.setEntity(createEntity(getCalendarEventsRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request postCalendarEvents(PostCalendarEventRequest postCalendarEventRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
.addPathPartAsIs("ml")
.addPathPartAsIs("calendars")
.addPathPart(postCalendarEventRequest.getCalendarId())
.addPathPartAsIs("events")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(postCalendarEventRequest,
REQUEST_BODY_CONTENT_TYPE,
PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS));
return request;
}
static Request putFilter(PutFilterRequest putFilterRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
@ -524,4 +626,13 @@ final class MLRequestConverters {
request.setEntity(createEntity(updateFilterRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request deleteFilter(DeleteFilterRequest deleteFilterRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack", "ml", "filters")
.addPathPart(deleteFilterRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
return request;
}
}

View File

@ -22,17 +22,22 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
@ -57,10 +62,13 @@ import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostCalendarEventResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -69,6 +77,8 @@ import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
@ -76,6 +86,8 @@ import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotResponse;
import org.elasticsearch.client.ml.job.stats.JobStats;
import java.io.IOException;
@ -464,6 +476,88 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
* Deletes Machine Learning Model Snapshots
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html">
* ML Delete Model Snapshot documentation</a>
*
* @param request The request to delete the model snapshot
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public AcknowledgedResponse deleteModelSnapshot(DeleteModelSnapshotRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::deleteModelSnapshot,
options,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
* Deletes Machine Learning Model Snapshots asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html">
* ML Delete Model Snapshot documentation</a>
*
* @param request The request to delete the model snapshot
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void deleteModelSnapshotAsync(DeleteModelSnapshotRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteModelSnapshot,
options,
AcknowledgedResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Reverts to a particular Machine Learning Model Snapshot
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html">
* ML Revert Model Snapshot documentation</a>
*
* @param request The request to revert to a previous model snapshot
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return action acknowledgement
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public RevertModelSnapshotResponse revertModelSnapshot(RevertModelSnapshotRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::revertModelSnapshot,
options,
RevertModelSnapshotResponse::fromXContent,
Collections.emptySet());
}
/**
* Reverts to a particular Machine Learning Model Snapshot asynchronously and notifies the listener on completion
* <p>
* For additional info
* see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html">
* ML Revert Model Snapshot documentation</a>
*
* @param request The request to revert to a previous model snapshot
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void revertModelSnapshotAsync(RevertModelSnapshotRequest request, RequestOptions options,
ActionListener<RevertModelSnapshotResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::revertModelSnapshot,
options,
RevertModelSnapshotResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Creates a new Machine Learning Datafeed
* <p>
@ -940,6 +1034,47 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
* Updates a snapshot for a Machine Learning Job.
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html">
* ML UPDATE model snapshots documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public UpdateModelSnapshotResponse updateModelSnapshot(UpdateModelSnapshotRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::updateModelSnapshot,
options,
UpdateModelSnapshotResponse::fromXContent,
Collections.emptySet());
}
/**
* Updates a snapshot for a Machine Learning Job, notifies listener once the requested snapshots are retrieved.
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html">
* ML UPDATE model snapshots documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void updateModelSnapshotAsync(UpdateModelSnapshotRequest request, RequestOptions options,
ActionListener<UpdateModelSnapshotResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::updateModelSnapshot,
options,
UpdateModelSnapshotResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Gets overall buckets for a set of Machine Learning Jobs.
* <p>
@ -1174,6 +1309,88 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
* Adds Machine Learning Job(s) to a calendar
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar-job.html">
* ML Put calendar job documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The {@link PutCalendarResponse} containing the updated calendar
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PutCalendarResponse putCalendarJob(PutCalendarJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::putCalendarJob,
options,
PutCalendarResponse::fromXContent,
Collections.emptySet());
}
/**
* Adds Machine Learning Job(s) to a calendar, notifies listener when completed
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar-job.html">
* ML Put calendar job documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void putCalendarJobAsync(PutCalendarJobRequest request, RequestOptions options, ActionListener<PutCalendarResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::putCalendarJob,
options,
PutCalendarResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Removes Machine Learning Job(s) from a calendar
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html">
* ML Delete calendar job documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return The {@link PutCalendarResponse} containing the updated calendar
* @throws IOException when there is a serialization issue sending the request or receiving the response
*/
public PutCalendarResponse deleteCalendarJob(DeleteCalendarJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
MLRequestConverters::deleteCalendarJob,
options,
PutCalendarResponse::fromXContent,
Collections.emptySet());
}
/**
* Removes Machine Learning Job(s) from a calendar, notifies listener when completed
* <p>
* For additional info
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html">
* ML Delete calendar job documentation</a>
*
* @param request The request
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener Listener to be notified upon request completion
*/
public void deleteCalendarJobAsync(DeleteCalendarJobRequest request,
RequestOptions options,
ActionListener<PutCalendarResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
MLRequestConverters::deleteCalendarJob,
options,
PutCalendarResponse::fromXContent,
listener,
Collections.emptySet());
}
/**
* Deletes the given Machine Learning Calendar
* <p>
@ -1214,6 +1431,88 @@ public final class MachineLearningClient {
Collections.emptySet());
}
/**
 * Gets the events for a machine learning calendar
 * <p>
 * For additional info
 * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar-event.html">
 * GET Calendar Events API</a>
 *
 * @param request The request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return The {@link GetCalendarEventsResponse} containing the scheduled events
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public GetCalendarEventsResponse getCalendarEvents(GetCalendarEventsRequest request, RequestOptions options) throws IOException {
    return restHighLevelClient.performRequestAndParseEntity(request,
        MLRequestConverters::getCalendarEvents,
        options,
        GetCalendarEventsResponse::fromXContent,
        Collections.emptySet());
}
/**
 * Gets the events for a machine learning calendar asynchronously, notifies the listener on completion
 * <p>
 * For additional info
 * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar-event.html">
 * GET Calendar Events API</a>
 *
 * @param request The request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener Listener to be notified upon request completion
 */
public void getCalendarEventsAsync(GetCalendarEventsRequest request, RequestOptions options,
                                   ActionListener<GetCalendarEventsResponse> listener) {
    restHighLevelClient.performRequestAsyncAndParseEntity(request,
        MLRequestConverters::getCalendarEvents,
        options,
        GetCalendarEventsResponse::fromXContent,
        listener,
        Collections.emptySet());
}
/**
 * Creates new events for a machine learning calendar
 * <p>
 * For additional info
 * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html">
 * Add Events to Calendar API</a>
 *
 * @param request The request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return The {@link PostCalendarEventResponse} containing the scheduled events
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public PostCalendarEventResponse postCalendarEvent(PostCalendarEventRequest request, RequestOptions options) throws IOException {
    return restHighLevelClient.performRequestAndParseEntity(request,
        MLRequestConverters::postCalendarEvents,
        options,
        PostCalendarEventResponse::fromXContent,
        Collections.emptySet());
}
/**
 * Creates new events for a machine learning calendar asynchronously, notifies the listener on completion
 * <p>
 * For additional info
 * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html">
 * Add Events to Calendar API</a>
 *
 * @param request The request
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener Listener to be notified upon request completion
 */
public void postCalendarEventAsync(PostCalendarEventRequest request, RequestOptions options,
                                   ActionListener<PostCalendarEventResponse> listener) {
    restHighLevelClient.performRequestAsyncAndParseEntity(request,
        MLRequestConverters::postCalendarEvents,
        options,
        PostCalendarEventResponse::fromXContent,
        listener,
        Collections.emptySet());
}
/**
* Creates a new Machine Learning Filter
* <p>
@ -1329,4 +1628,44 @@ public final class MachineLearningClient {
listener,
Collections.emptySet());
}
/**
 * Deletes the given Machine Learning filter
 * <p>
 * For additional info
 * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html">
 * ML Delete Filter documentation</a>
 *
 * @param request The request to delete the filter
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return action acknowledgement
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public AcknowledgedResponse deleteFilter(DeleteFilterRequest request, RequestOptions options) throws IOException {
    // Blocking call; the server replies with a simple acknowledged flag.
    return restHighLevelClient.performRequestAndParseEntity(
        request, MLRequestConverters::deleteFilter, options, AcknowledgedResponse::fromXContent, Collections.emptySet());
}
/**
 * Deletes the given Machine Learning filter asynchronously and notifies the listener on completion
 * <p>
 * For additional info
 * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html">
 * ML Delete Filter documentation</a>
 *
 * @param request The request to delete the filter
 * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener Listener to be notified upon request completion
 */
public void deleteFilterAsync(DeleteFilterRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
    // Non-blocking variant; the acknowledgement is delivered through the listener.
    restHighLevelClient.performRequestAsyncAndParseEntity(
        request, MLRequestConverters::deleteFilter, options, AcknowledgedResponse::fromXContent, listener,
        Collections.emptySet());
}
}

View File

@ -50,6 +50,8 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
@ -78,7 +80,6 @@ import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.client.core.TermVectorsRequest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@ -264,7 +265,7 @@ final class RequestConverters {
return request;
}
static Request sourceExists(GetRequest getRequest) {
Request request = new Request(HttpHead.METHOD_NAME, endpoint(getRequest.index(), getRequest.type(), getRequest.id(), "_source"));
@ -275,7 +276,7 @@ final class RequestConverters {
parameters.withRealtime(getRequest.realtime());
// Version params are not currently supported by the source exists API so are not passed
return request;
}
}
static Request multiGet(MultiGetRequest multiGetRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_mget");
@ -626,6 +627,13 @@ final class RequestConverters {
return request;
}
static Request mtermVectors(MultiTermVectorsRequest mtvrequest) throws IOException {
    // The multi term vectors API uses a single fixed endpoint; target indices/ids travel in the body.
    Request request = new Request(HttpGet.METHOD_NAME, "_mtermvectors");
    request.setEntity(createEntity(mtvrequest, REQUEST_BODY_CONTENT_TYPE));
    return request;
}
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -644,7 +652,12 @@ final class RequestConverters {
}
static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
return createEntity(toXContent, xContentType, ToXContent.EMPTY_PARAMS);
}
static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams)
throws IOException {
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, toXContentParams, false).toBytesRef();
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
}

View File

@ -60,6 +60,8 @@ import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.client.core.TermVectorsResponse;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.MultiTermVectorsResponse;
import org.elasticsearch.client.tasks.TaskSubmissionResponse;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedFunction;
@ -1158,6 +1160,37 @@ public class RestHighLevelClient implements Closeable {
}
/**
 * Calls the Multi Term Vectors API
 *
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-termvectors.html">Multi Term Vectors API
 * on elastic.co</a>
 *
 * @param request the request
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return the response containing the term vectors for each requested document
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public final MultiTermVectorsResponse mtermvectors(MultiTermVectorsRequest request, RequestOptions options) throws IOException {
    return performRequestAndParseEntity(
        request, RequestConverters::mtermVectors, options, MultiTermVectorsResponse::fromXContent, emptySet());
}
/**
 * Asynchronously calls the Multi Term Vectors API, notifying the listener on completion.
 *
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-termvectors.html">Multi Term Vectors API
 * on elastic.co</a>
 * @param request the request
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener the listener to be notified upon request completion
 */
public final void mtermvectorsAsync(MultiTermVectorsRequest request, RequestOptions options,
                                    ActionListener<MultiTermVectorsResponse> listener) {
    // Non-blocking variant of mtermvectors; result or failure is handed to the listener.
    performRequestAsyncAndParseEntity(
        request, RequestConverters::mtermVectors, options, MultiTermVectorsResponse::fromXContent, listener, emptySet());
}
/**
* Executes a request using the Ranking Evaluation API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html">Ranking Evaluation API

View File

@ -20,8 +20,8 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
import org.elasticsearch.client.rollup.GetRollupIndexCapsResponse;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
@ -31,7 +31,6 @@ import org.elasticsearch.client.rollup.GetRollupCapsResponse;
import org.elasticsearch.client.rollup.GetRollupJobRequest;
import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.StartRollupJobRequest;
import org.elasticsearch.client.rollup.StartRollupJobResponse;
import org.elasticsearch.client.rollup.StopRollupJobRequest;
@ -64,11 +63,11 @@ public class RollupClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public PutRollupJobResponse putRollupJob(PutRollupJobRequest request, RequestOptions options) throws IOException {
public AcknowledgedResponse putRollupJob(PutRollupJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RollupRequestConverters::putJob,
options,
PutRollupJobResponse::fromXContent,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
@ -80,11 +79,11 @@ public class RollupClient {
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
*/
public void putRollupJobAsync(PutRollupJobRequest request, RequestOptions options, ActionListener<PutRollupJobResponse> listener) {
public void putRollupJobAsync(PutRollupJobRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RollupRequestConverters::putJob,
options,
PutRollupJobResponse::fromXContent,
AcknowledgedResponse::fromXContent,
listener, Collections.emptySet());
}
@ -165,11 +164,11 @@ public class RollupClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public DeleteRollupJobResponse deleteRollupJob(DeleteRollupJobRequest request, RequestOptions options) throws IOException {
public AcknowledgedResponse deleteRollupJob(DeleteRollupJobRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request,
RollupRequestConverters::deleteJob,
options,
DeleteRollupJobResponse::fromXContent,
AcknowledgedResponse::fromXContent,
Collections.emptySet());
}
/**
@ -182,11 +181,11 @@ public class RollupClient {
*/
public void deleteRollupJobAsync(DeleteRollupJobRequest request,
RequestOptions options,
ActionListener<DeleteRollupJobResponse> listener) {
ActionListener<AcknowledgedResponse> listener) {
restHighLevelClient.performRequestAsyncAndParseEntity(request,
RollupRequestConverters::deleteJob,
options,
DeleteRollupJobResponse::fromXContent,
AcknowledgedResponse::fromXContent,
listener, Collections.emptySet());
}

View File

@ -65,7 +65,14 @@ final class RollupRequestConverters {
.addPathPart(stopRollupJobRequest.getJobId())
.addPathPartAsIs("_stop")
.build();
return new Request(HttpPost.METHOD_NAME, endpoint);
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withTimeout(stopRollupJobRequest.timeout());
if (stopRollupJobRequest.waitForCompletion() != null) {
parameters.withWaitForCompletion(stopRollupJobRequest.waitForCompletion());
}
return request;
}
static Request getJob(final GetRollupJobRequest getRollupJobRequest) {

View File

@ -42,6 +42,8 @@ import org.elasticsearch.client.security.GetRoleMappingsRequest;
import org.elasticsearch.client.security.GetRoleMappingsResponse;
import org.elasticsearch.client.security.GetSslCertificatesRequest;
import org.elasticsearch.client.security.GetSslCertificatesResponse;
import org.elasticsearch.client.security.HasPrivilegesRequest;
import org.elasticsearch.client.security.HasPrivilegesResponse;
import org.elasticsearch.client.security.InvalidateTokenRequest;
import org.elasticsearch.client.security.InvalidateTokenResponse;
import org.elasticsearch.client.security.PutRoleMappingRequest;
@ -244,6 +246,34 @@ public final class SecurityClient {
AuthenticateResponse::fromXContent, listener, emptySet());
}
/**
 * Determine whether the current user has a specified list of privileges
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-has-privileges.html">
 * the docs</a> for more.
 *
 * @param request the request with the privileges to check
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @return the response from the has privileges call
 * @throws IOException when there is a serialization issue sending the request or receiving the response
 */
public HasPrivilegesResponse hasPrivileges(HasPrivilegesRequest request, RequestOptions options) throws IOException {
    return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::hasPrivileges, options,
        HasPrivilegesResponse::fromXContent, emptySet());
}
/**
 * Asynchronously determine whether the current user has a specified list of privileges
 * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-has-privileges.html">
 * the docs</a> for more.
 *
 * @param request the request with the privileges to check
 * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
 * @param listener the listener to be notified upon request completion
 */
public void hasPrivilegesAsync(HasPrivilegesRequest request, RequestOptions options, ActionListener<HasPrivilegesResponse> listener) {
    // Non-blocking variant of hasPrivileges; the parsed response is delivered to the listener.
    restHighLevelClient.performRequestAsyncAndParseEntity(
        request, SecurityRequestConverters::hasPrivileges, options, HasPrivilegesResponse::fromXContent, listener, emptySet());
}
/**
* Clears the cache in one or more realms.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-cache.html">

View File

@ -30,6 +30,7 @@ import org.elasticsearch.client.security.CreateTokenRequest;
import org.elasticsearch.client.security.DeletePrivilegesRequest;
import org.elasticsearch.client.security.DeleteRoleMappingRequest;
import org.elasticsearch.client.security.DeleteRoleRequest;
import org.elasticsearch.client.security.HasPrivilegesRequest;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.GetRoleMappingsRequest;
@ -114,6 +115,12 @@ final class SecurityRequestConverters {
return request;
}
// Builds the HTTP request for the has-privileges security API: a GET against a fixed
// endpoint with the privileges-to-check serialized into the request body.
static Request hasPrivileges(HasPrivilegesRequest hasPrivilegesRequest) throws IOException {
    Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/security/user/_has_privileges");
    request.setEntity(createEntity(hasPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE));
    return request;
}
static Request clearRealmCache(ClearRealmCacheRequest clearRealmCacheRequest) {
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_xpack/security/realm");

View File

@ -0,0 +1,203 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ccr;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
 * Holder for the optional follow-parameters shared by the CCR request classes
 * (in this file, {@code PutFollowRequest} and {@code ResumeFollowRequest} extend it).
 * Every setting is nullable; only non-null values are serialized by
 * {@link #toXContentFragment}, leaving the rest to server-side defaults.
 */
public class FollowConfig {

    // Field names as they appear in the request body.
    static final ParseField MAX_READ_REQUEST_OPERATION_COUNT = new ParseField("max_read_request_operation_count");
    static final ParseField MAX_READ_REQUEST_SIZE = new ParseField("max_read_request_size");
    static final ParseField MAX_OUTSTANDING_READ_REQUESTS = new ParseField("max_outstanding_read_requests");
    static final ParseField MAX_WRITE_REQUEST_OPERATION_COUNT = new ParseField("max_write_request_operation_count");
    static final ParseField MAX_WRITE_REQUEST_SIZE = new ParseField("max_write_request_size");
    static final ParseField MAX_OUTSTANDING_WRITE_REQUESTS = new ParseField("max_outstanding_write_requests");
    static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count");
    static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size");
    static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay");
    static final ParseField READ_POLL_TIMEOUT = new ParseField("read_poll_timeout");

    // All settings are optional; null means "not set" and the field is omitted from the body.
    private Integer maxReadRequestOperationCount;
    private Integer maxOutstandingReadRequests;
    private ByteSizeValue maxReadRequestSize;
    private Integer maxWriteRequestOperationCount;
    private ByteSizeValue maxWriteRequestSize;
    private Integer maxOutstandingWriteRequests;
    private Integer maxWriteBufferCount;
    private ByteSizeValue maxWriteBufferSize;
    private TimeValue maxRetryDelay;
    private TimeValue readPollTimeout;

    // Package-private: constructed through the request subclasses.
    FollowConfig() {
    }

    public Integer getMaxReadRequestOperationCount() {
        return maxReadRequestOperationCount;
    }

    public void setMaxReadRequestOperationCount(Integer maxReadRequestOperationCount) {
        this.maxReadRequestOperationCount = maxReadRequestOperationCount;
    }

    public Integer getMaxOutstandingReadRequests() {
        return maxOutstandingReadRequests;
    }

    public void setMaxOutstandingReadRequests(Integer maxOutstandingReadRequests) {
        this.maxOutstandingReadRequests = maxOutstandingReadRequests;
    }

    public ByteSizeValue getMaxReadRequestSize() {
        return maxReadRequestSize;
    }

    public void setMaxReadRequestSize(ByteSizeValue maxReadRequestSize) {
        this.maxReadRequestSize = maxReadRequestSize;
    }

    public Integer getMaxWriteRequestOperationCount() {
        return maxWriteRequestOperationCount;
    }

    public void setMaxWriteRequestOperationCount(Integer maxWriteRequestOperationCount) {
        this.maxWriteRequestOperationCount = maxWriteRequestOperationCount;
    }

    public ByteSizeValue getMaxWriteRequestSize() {
        return maxWriteRequestSize;
    }

    public void setMaxWriteRequestSize(ByteSizeValue maxWriteRequestSize) {
        this.maxWriteRequestSize = maxWriteRequestSize;
    }

    public Integer getMaxOutstandingWriteRequests() {
        return maxOutstandingWriteRequests;
    }

    public void setMaxOutstandingWriteRequests(Integer maxOutstandingWriteRequests) {
        this.maxOutstandingWriteRequests = maxOutstandingWriteRequests;
    }

    public Integer getMaxWriteBufferCount() {
        return maxWriteBufferCount;
    }

    public void setMaxWriteBufferCount(Integer maxWriteBufferCount) {
        this.maxWriteBufferCount = maxWriteBufferCount;
    }

    public ByteSizeValue getMaxWriteBufferSize() {
        return maxWriteBufferSize;
    }

    public void setMaxWriteBufferSize(ByteSizeValue maxWriteBufferSize) {
        this.maxWriteBufferSize = maxWriteBufferSize;
    }

    public TimeValue getMaxRetryDelay() {
        return maxRetryDelay;
    }

    public void setMaxRetryDelay(TimeValue maxRetryDelay) {
        this.maxRetryDelay = maxRetryDelay;
    }

    public TimeValue getReadPollTimeout() {
        return readPollTimeout;
    }

    public void setReadPollTimeout(TimeValue readPollTimeout) {
        this.readPollTimeout = readPollTimeout;
    }

    // Writes the non-null settings into an already-open XContent object; subclasses call this
    // from their toXContent after emitting their own fields.
    void toXContentFragment(XContentBuilder builder, ToXContent.Params params) throws IOException {
        if (maxReadRequestOperationCount != null) {
            builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount);
        }
        if (maxReadRequestSize != null) {
            builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep());
        }
        if (maxWriteRequestOperationCount != null) {
            builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount);
        }
        if (maxWriteRequestSize != null) {
            builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep());
        }
        if (maxWriteBufferCount != null) {
            builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount);
        }
        if (maxWriteBufferSize != null) {
            builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep());
        }
        if (maxOutstandingReadRequests != null) {
            builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxOutstandingReadRequests);
        }
        if (maxOutstandingWriteRequests != null) {
            builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxOutstandingWriteRequests);
        }
        if (maxRetryDelay != null) {
            builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep());
        }
        if (readPollTimeout != null) {
            builder.field(READ_POLL_TIMEOUT.getPreferredName(), readPollTimeout.getStringRep());
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        FollowConfig that = (FollowConfig) o;
        return Objects.equals(maxReadRequestOperationCount, that.maxReadRequestOperationCount) &&
            Objects.equals(maxOutstandingReadRequests, that.maxOutstandingReadRequests) &&
            Objects.equals(maxReadRequestSize, that.maxReadRequestSize) &&
            Objects.equals(maxWriteRequestOperationCount, that.maxWriteRequestOperationCount) &&
            Objects.equals(maxWriteRequestSize, that.maxWriteRequestSize) &&
            Objects.equals(maxOutstandingWriteRequests, that.maxOutstandingWriteRequests) &&
            Objects.equals(maxWriteBufferCount, that.maxWriteBufferCount) &&
            Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) &&
            Objects.equals(maxRetryDelay, that.maxRetryDelay) &&
            Objects.equals(readPollTimeout, that.readPollTimeout);
    }

    @Override
    public int hashCode() {
        return Objects.hash(
            maxReadRequestOperationCount,
            maxOutstandingReadRequests,
            maxReadRequestSize,
            maxWriteRequestOperationCount,
            maxWriteRequestSize,
            maxOutstandingWriteRequests,
            maxWriteBufferCount,
            maxWriteBufferSize,
            maxRetryDelay,
            readPollTimeout
        );
    }
}

View File

@ -21,43 +21,21 @@ package org.elasticsearch.client.ccr;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public final class PutFollowRequest implements Validatable, ToXContentObject {
public final class PutFollowRequest extends FollowConfig implements Validatable, ToXContentObject {
static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster");
static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index");
static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index");
static final ParseField MAX_READ_REQUEST_OPERATION_COUNT = new ParseField("max_read_request_operation_count");
static final ParseField MAX_READ_REQUEST_SIZE = new ParseField("max_read_request_size");
static final ParseField MAX_OUTSTANDING_READ_REQUESTS = new ParseField("max_outstanding_read_requests");
static final ParseField MAX_WRITE_REQUEST_OPERATION_COUNT = new ParseField("max_write_request_operation_count");
static final ParseField MAX_WRITE_REQUEST_SIZE = new ParseField("max_write_request_size");
static final ParseField MAX_OUTSTANDING_WRITE_REQUESTS = new ParseField("max_outstanding_write_requests");
static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count");
static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size");
static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay");
static final ParseField READ_POLL_TIMEOUT = new ParseField("read_poll_timeout");
private final String remoteCluster;
private final String leaderIndex;
private final String followerIndex;
private Integer maxReadRequestOperationCount;
private Integer maxOutstandingReadRequests;
private ByteSizeValue maxReadRequestSize;
private Integer maxWriteRequestOperationCount;
private ByteSizeValue maxWriteRequestSize;
private Integer maxOutstandingWriteRequests;
private Integer maxWriteBufferCount;
private ByteSizeValue maxWriteBufferSize;
private TimeValue maxRetryDelay;
private TimeValue readPollTimeout;
public PutFollowRequest(String remoteCluster, String leaderIndex, String followerIndex) {
this.remoteCluster = Objects.requireNonNull(remoteCluster, "remoteCluster");
@ -71,36 +49,7 @@ public final class PutFollowRequest implements Validatable, ToXContentObject {
builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster);
builder.field(LEADER_INDEX_FIELD.getPreferredName(), leaderIndex);
builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex);
if (maxReadRequestOperationCount != null) {
builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount);
}
if (maxReadRequestSize != null) {
builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep());
}
if (maxWriteRequestOperationCount != null) {
builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount);
}
if (maxWriteRequestSize != null) {
builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep());
}
if (maxWriteBufferCount != null) {
builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount);
}
if (maxWriteBufferSize != null) {
builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep());
}
if (maxOutstandingReadRequests != null) {
builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxOutstandingReadRequests);
}
if (maxOutstandingWriteRequests != null) {
builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxOutstandingWriteRequests);
}
if (maxRetryDelay != null) {
builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep());
}
if (readPollTimeout != null) {
builder.field(READ_POLL_TIMEOUT.getPreferredName(), readPollTimeout.getStringRep());
}
toXContentFragment(builder, params);
builder.endObject();
return builder;
}
@ -117,122 +66,24 @@ public final class PutFollowRequest implements Validatable, ToXContentObject {
return followerIndex;
}
public Integer getMaxReadRequestOperationCount() {
return maxReadRequestOperationCount;
}
public void setMaxReadRequestOperationCount(Integer maxReadRequestOperationCount) {
this.maxReadRequestOperationCount = maxReadRequestOperationCount;
}
public Integer getMaxOutstandingReadRequests() {
return maxOutstandingReadRequests;
}
public void setMaxOutstandingReadRequests(Integer maxOutstandingReadRequests) {
this.maxOutstandingReadRequests = maxOutstandingReadRequests;
}
public ByteSizeValue getMaxReadRequestSize() {
return maxReadRequestSize;
}
public void setMaxReadRequestSize(ByteSizeValue maxReadRequestSize) {
this.maxReadRequestSize = maxReadRequestSize;
}
public Integer getMaxWriteRequestOperationCount() {
return maxWriteRequestOperationCount;
}
public void setMaxWriteRequestOperationCount(Integer maxWriteRequestOperationCount) {
this.maxWriteRequestOperationCount = maxWriteRequestOperationCount;
}
public ByteSizeValue getMaxWriteRequestSize() {
return maxWriteRequestSize;
}
public void setMaxWriteRequestSize(ByteSizeValue maxWriteRequestSize) {
this.maxWriteRequestSize = maxWriteRequestSize;
}
public Integer getMaxOutstandingWriteRequests() {
return maxOutstandingWriteRequests;
}
public void setMaxOutstandingWriteRequests(Integer maxOutstandingWriteRequests) {
this.maxOutstandingWriteRequests = maxOutstandingWriteRequests;
}
public Integer getMaxWriteBufferCount() {
return maxWriteBufferCount;
}
public void setMaxWriteBufferCount(Integer maxWriteBufferCount) {
this.maxWriteBufferCount = maxWriteBufferCount;
}
public ByteSizeValue getMaxWriteBufferSize() {
return maxWriteBufferSize;
}
public void setMaxWriteBufferSize(ByteSizeValue maxWriteBufferSize) {
this.maxWriteBufferSize = maxWriteBufferSize;
}
public TimeValue getMaxRetryDelay() {
return maxRetryDelay;
}
public void setMaxRetryDelay(TimeValue maxRetryDelay) {
this.maxRetryDelay = maxRetryDelay;
}
public TimeValue getReadPollTimeout() {
return readPollTimeout;
}
public void setReadPollTimeout(TimeValue readPollTimeout) {
this.readPollTimeout = readPollTimeout;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
PutFollowRequest that = (PutFollowRequest) o;
return Objects.equals(remoteCluster, that.remoteCluster) &&
Objects.equals(leaderIndex, that.leaderIndex) &&
Objects.equals(followerIndex, that.followerIndex) &&
Objects.equals(maxReadRequestOperationCount, that.maxReadRequestOperationCount) &&
Objects.equals(maxOutstandingReadRequests, that.maxOutstandingReadRequests) &&
Objects.equals(maxReadRequestSize, that.maxReadRequestSize) &&
Objects.equals(maxWriteRequestOperationCount, that.maxWriteRequestOperationCount) &&
Objects.equals(maxWriteRequestSize, that.maxWriteRequestSize) &&
Objects.equals(maxOutstandingWriteRequests, that.maxOutstandingWriteRequests) &&
Objects.equals(maxWriteBufferCount, that.maxWriteBufferCount) &&
Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) &&
Objects.equals(maxRetryDelay, that.maxRetryDelay) &&
Objects.equals(readPollTimeout, that.readPollTimeout);
Objects.equals(followerIndex, that.followerIndex);
}
@Override
public int hashCode() {
return Objects.hash(
super.hashCode(),
remoteCluster,
leaderIndex,
followerIndex,
maxReadRequestOperationCount,
maxOutstandingReadRequests,
maxReadRequestSize,
maxWriteRequestOperationCount,
maxWriteRequestSize,
maxOutstandingWriteRequests,
maxWriteBufferCount,
maxWriteBufferSize,
maxRetryDelay,
readPollTimeout
followerIndex
);
}
}

View File

@ -0,0 +1,65 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ccr;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.client.ccr.PutFollowRequest.FOLLOWER_INDEX_FIELD;
/**
 * Request to resume following a leader index with an existing follower index.
 */
public final class ResumeFollowRequest extends FollowConfig implements Validatable, ToXContentObject {

    private final String followerIndex;

    /**
     * @param followerIndex the non-null name of the follower index to resume
     */
    public ResumeFollowRequest(String followerIndex) {
        this.followerIndex = Objects.requireNonNull(followerIndex, "followerIndex");
    }

    /** @return the name of the follower index */
    public String getFollowerIndex() {
        return followerIndex;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex);
        // Inherited follow parameters are emitted by the parent class.
        toXContentFragment(builder, params);
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        ResumeFollowRequest other = (ResumeFollowRequest) o;
        return Objects.equals(followerIndex, other.followerIndex);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), followerIndex);
    }
}

View File

@ -17,24 +17,21 @@
* under the License.
*/
package org.elasticsearch.client.rollup;
package org.elasticsearch.client.ccr;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.client.Validatable;
import java.io.IOException;
import java.util.Objects;
public class DeleteRollupJobResponse extends AcknowledgedResponse {
public final class UnfollowRequest implements Validatable {
public DeleteRollupJobResponse(boolean acknowledged) {
super(acknowledged);
private final String followerIndex;
public UnfollowRequest(String followerIndex) {
this.followerIndex = Objects.requireNonNull(followerIndex);
}
private static final ConstructingObjectParser<DeleteRollupJobResponse, Void> PARSER = AcknowledgedResponse
.generateParser("delete_rollup_job_response", DeleteRollupJobResponse::new, AcknowledgedResponse.PARSE_FIELD_NAME);
public static DeleteRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
public String getFollowerIndex() {
return followerIndex;
}
}

View File

@ -21,9 +21,6 @@ package org.elasticsearch.client.core;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -32,7 +29,7 @@ import java.util.function.Function;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
public class AcknowledgedResponse implements ToXContentObject {
public class AcknowledgedResponse {
protected static final String PARSE_FIELD_NAME = "acknowledged";
private static final ConstructingObjectParser<AcknowledgedResponse, Void> PARSER = AcknowledgedResponse
@ -75,16 +72,6 @@ public class AcknowledgedResponse implements ToXContentObject {
return Objects.hash(acknowledged);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
{
builder.field(getFieldName(), isAcknowledged());
}
builder.endObject();
return builder;
}
/**
* @return the field name this response uses to output the acknowledged flag
*/

View File

@ -90,7 +90,8 @@ public final class CountRequest extends ActionRequest implements IndicesRequest.
/**
* The document types to execute the count against. Defaults to be executed against all types.
*
* @deprecated Types are going away, prefer filtering on a type.
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public CountRequest types(String... types) {
@ -172,6 +173,11 @@ public final class CountRequest extends ActionRequest implements IndicesRequest.
return this;
}
/**
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public String[] types() {
return Arrays.copyOf(this.types, this.types.length);
}

View File

@ -0,0 +1,77 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.core;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.client.core.TermVectorsRequest.createFromTemplate;
/**
 * A request to retrieve term vectors for multiple documents in one call.
 * Serializes as an object with a "docs" array of individual term vector requests.
 */
public class MultiTermVectorsRequest implements ToXContentObject, Validatable {

    // The list itself never changes identity, only its contents; declare it final.
    private final List<TermVectorsRequest> requests = new ArrayList<>();

    /**
     * Constructs an empty MultiTermVectorsRequest
     * After that use {@code add} method to add individual {@code TermVectorsRequest} to it.
     */
    public MultiTermVectorsRequest() {}

    /**
     * Constructs a MultiTermVectorsRequest from the given document ids
     * and a template {@code TermVectorsRequest}.
     * Used when individual requests share the same index, type and other settings.
     * @param ids - ids of documents for which term vectors are requested
     * @param template - a template {@code TermVectorsRequest} that allows to set all
     * settings only once for all requests.
     */
    public MultiTermVectorsRequest(String[] ids, TermVectorsRequest template) {
        for (String id : ids) {
            requests.add(createFromTemplate(template, id));
        }
    }

    /**
     * Adds another {@code TermVectorsRequest} to this {@code MultiTermVectorsRequest}
     * @param request - {@code TermVectorsRequest} to add
     */
    public void add(TermVectorsRequest request) {
        requests.add(request);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.startArray("docs");
        for (TermVectorsRequest request : requests) {
            request.toXContent(builder, params);
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }
}

View File

@ -0,0 +1,77 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.core;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Response carrying the term vectors for multiple documents.
 */
public class MultiTermVectorsResponse {

    private final List<TermVectorsResponse> responses;

    public MultiTermVectorsResponse(List<TermVectorsResponse> responses) {
        this.responses = responses;
    }

    // The parser is immutable once built and shared across threads; it must be final
    // (matches the PARSER convention used elsewhere in this client package).
    private static final ConstructingObjectParser<MultiTermVectorsResponse, Void> PARSER =
        new ConstructingObjectParser<>("multi_term_vectors", true,
            args -> {
                // as the response comes from server, we are sure that args[0] will be a list of TermVectorsResponse
                @SuppressWarnings("unchecked") List<TermVectorsResponse> termVectorsResponsesList = (List<TermVectorsResponse>) args[0];
                return new MultiTermVectorsResponse(termVectorsResponsesList);
            }
        );

    static {
        PARSER.declareObjectArray(constructorArg(), (p, c) -> TermVectorsResponse.fromXContent(p), new ParseField("docs"));
    }

    public static MultiTermVectorsResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /**
     * Returns the list of {@code TermVectorsResponse} for this {@code MultiTermVectorsResponse}
     */
    public List<TermVectorsResponse> getTermVectorsResponses() {
        return responses;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof MultiTermVectorsResponse)) return false;
        MultiTermVectorsResponse other = (MultiTermVectorsResponse) obj;
        return Objects.equals(responses, other.responses);
    }

    @Override
    public int hashCode() {
        return Objects.hash(responses);
    }
}

View File

@ -33,6 +33,8 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
private final String index;
private final String type;
private String id = null;
private XContentBuilder docBuilder = null;
private String routing = null;
private String preference = null;
private boolean realtime = true;
@ -44,7 +46,6 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
private boolean requestTermStatistics = false;
private Map<String, String> perFieldAnalyzer = null;
private Map<String, Integer> filterSettings = null;
private XContentBuilder docBuilder = null;
/**
@ -54,7 +55,8 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
* @param docId - id of the document
*/
public TermVectorsRequest(String index, String type, String docId) {
this(index, type);
this.index = index;
this.type = type;
this.id = docId;
}
@ -62,10 +64,35 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
* Constructs TermVectorRequest for an artificial document
* @param index - index of the document
* @param type - type of the document
* @param docBuilder - an artificial document
*/
public TermVectorsRequest(String index, String type) {
public TermVectorsRequest(String index, String type, XContentBuilder docBuilder) {
this.index = index;
this.type = type;
this.docBuilder = docBuilder;
}
/**
* Constructs a new TermVectorRequest from a template
* using the provided document id
* @param template - a term vector request served as a template
* @param id - id of the requested document
*/
static TermVectorsRequest createFromTemplate(TermVectorsRequest template, String id) {
TermVectorsRequest request = new TermVectorsRequest(template.getIndex(), template.getType(), id);
request.realtime = template.getRealtime();
request.requestPositions = template.requestPositions;
request.requestPayloads = template.requestPayloads;
request.requestOffsets = template.requestOffsets;
request.requestFieldStatistics = template.requestFieldStatistics;
request.requestTermStatistics = template.requestTermStatistics;
if (template.routing != null) request.setRouting(template.getRouting());
if (template.preference != null) request.setPreference(template.getPreference());
if (template.fields != null) request.setFields(template.getFields());
if (template.perFieldAnalyzer != null) request.setPerFieldAnalyzer(template.perFieldAnalyzer);
if (template.filterSettings != null) request.setFilterSettings(template.filterSettings);
return request;
}
/**
@ -143,13 +170,6 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
this.perFieldAnalyzer = perFieldAnalyzer;
}
/**
* Sets an artifical document on what to request _termvectors
*/
public void setDoc(XContentBuilder docBuilder) {
this.docBuilder = docBuilder;
}
/**
* Sets conditions for terms filtering
*/
@ -197,6 +217,9 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("_index", index);
builder.field("_type", type);
if (id != null) builder.field("_id", id);
// set values only when different from defaults
if (requestPositions == false) builder.field("positions", false);
if (requestPayloads == false) builder.field("payloads", false);

View File

@ -23,7 +23,6 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
@ -91,26 +90,16 @@ public class StartBasicResponse {
private String acknowledgeMessage;
public enum Status {
GENERATED_BASIC(true, null, RestStatus.OK),
ALREADY_USING_BASIC(false, "Operation failed: Current license is basic.", RestStatus.FORBIDDEN),
NEED_ACKNOWLEDGEMENT(false, "Operation failed: Needs acknowledgement.", RestStatus.OK);
GENERATED_BASIC(true, null),
ALREADY_USING_BASIC(false, "Operation failed: Current license is basic."),
NEED_ACKNOWLEDGEMENT(false, "Operation failed: Needs acknowledgement.");
private final boolean isBasicStarted;
private final String errorMessage;
private final RestStatus restStatus;
Status(boolean isBasicStarted, String errorMessage, RestStatus restStatus) {
Status(boolean isBasicStarted, String errorMessage) {
this.isBasicStarted = isBasicStarted;
this.errorMessage = errorMessage;
this.restStatus = restStatus;
}
String getErrorMessage() {
return errorMessage;
}
boolean isBasicStarted() {
return isBasicStarted;
}
static StartBasicResponse.Status fromErrorMessage(final String errorMessage) {
@ -126,14 +115,11 @@ public class StartBasicResponse {
private StartBasicResponse.Status status;
public StartBasicResponse() {
}
StartBasicResponse(StartBasicResponse.Status status) {
private StartBasicResponse(StartBasicResponse.Status status) {
this(status, Collections.emptyMap(), null);
}
StartBasicResponse(StartBasicResponse.Status status,
private StartBasicResponse(StartBasicResponse.Status status,
Map<String, String[]> acknowledgeMessages, String acknowledgeMessage) {
this.status = status;
this.acknowledgeMessages = acknowledgeMessages;
@ -167,5 +153,4 @@ public class StartBasicResponse {
public static StartBasicResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import java.security.InvalidParameterException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
 * Request class for removing Machine Learning Jobs from an existing calendar
 */
public class DeleteCalendarJobRequest extends ActionRequest {

    private final String calendarId;
    private final List<String> jobIds;

    /**
     * Create a new request referencing an existing Calendar and which JobIds to remove
     * from it.
     *
     * @param calendarId The non-null ID of the calendar
     * @param jobIds JobIds to remove from the calendar, cannot be empty, or contain null values.
     *               It can be a list of jobs or groups.
     */
    public DeleteCalendarJobRequest(String calendarId, String... jobIds) {
        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
        if (jobIds.length == 0) {
            throw new InvalidParameterException("jobIds must not be empty.");
        }
        for (String jobId : jobIds) {
            if (jobId == null) {
                throw new NullPointerException("jobIds must not contain null values.");
            }
        }
        this.jobIds = Arrays.asList(jobIds);
    }

    /** @return the ID of the calendar the jobs are removed from */
    public String getCalendarId() {
        return calendarId;
    }

    /** @return the job (or group) IDs to remove from the calendar */
    public List<String> getJobIds() {
        return jobIds;
    }

    @Override
    public ActionRequestValidationException validate() {
        // No client-side validation beyond the constructor checks.
        return null;
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        DeleteCalendarJobRequest request = (DeleteCalendarJobRequest) other;
        return Objects.equals(jobIds, request.jobIds)
            && Objects.equals(calendarId, request.calendarId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, calendarId);
    }
}

View File

@ -0,0 +1,60 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.Validatable;
import java.util.Objects;
/**
 * A request to delete a machine learning filter
 */
public class DeleteFilterRequest implements Validatable {

    private final String filterId;

    /**
     * @param filterId the non-null id of the filter to delete
     */
    public DeleteFilterRequest(String filterId) {
        this.filterId = Objects.requireNonNull(filterId, "[filter_id] is required");
    }

    /** @return the id of the filter being deleted */
    public String getId() {
        return filterId;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        DeleteFilterRequest other = (DeleteFilterRequest) obj;
        return Objects.equals(filterId, other.filterId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(filterId);
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import java.util.Objects;
/**
 * Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs
 */
public class DeleteModelSnapshotRequest extends ActionRequest {

    private final String jobId;
    private final String snapshotId;

    /**
     * @param jobId the non-null id of the job the snapshot belongs to
     * @param snapshotId the non-null id of the snapshot to delete
     */
    public DeleteModelSnapshotRequest(String jobId, String snapshotId) {
        this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null");
        this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null");
    }

    /** @return the id of the job the snapshot belongs to */
    public String getJobId() {
        return jobId;
    }

    /** @return the id of the snapshot to delete */
    public String getSnapshotId() {
        return snapshotId;
    }

    @Override
    public ActionRequestValidationException validate() {
        // No client-side validation beyond the constructor checks.
        return null;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        DeleteModelSnapshotRequest other = (DeleteModelSnapshotRequest) obj;
        return Objects.equals(jobId, other.jobId)
            && Objects.equals(snapshotId, other.snapshotId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, snapshotId);
    }
}

View File

@ -0,0 +1,169 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
 * Get the Scheduled Events for a Calendar
 */
public class GetCalendarEventsRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField START = new ParseField("start");
    public static final ParseField END = new ParseField("end");

    public static final ConstructingObjectParser<GetCalendarEventsRequest, Void> PARSER =
        new ConstructingObjectParser<>("get_calendar_events_request", a -> new GetCalendarEventsRequest((String) a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID);
        PARSER.declareString(GetCalendarEventsRequest::setStart, START);
        PARSER.declareString(GetCalendarEventsRequest::setEnd, END);
        PARSER.declareString(GetCalendarEventsRequest::setJobId, Job.ID);
        PARSER.declareObject(GetCalendarEventsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
    }

    private final String calendarId;
    private String start;
    private String end;
    private String jobId;
    private PageParams pageParams;

    /**
     * Create a new request to get the ScheduledEvents for the given calendarId.
     *
     * @param calendarId The ID of the calendar.
     *                   Can be `_all` to get ALL ScheduledEvents for all calendars.
     */
    public GetCalendarEventsRequest(String calendarId) {
        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
    }

    /** @return the ID of the calendar whose events are requested */
    public String getCalendarId() {
        return calendarId;
    }

    public String getStart() {
        return start;
    }

    /**
     * Specifies to get events with timestamps after this time.
     *
     * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setStart(String start) {
        this.start = start;
    }

    public String getEnd() {
        return end;
    }

    /**
     * Specifies to get events with timestamps earlier than this time.
     *
     * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
     */
    public void setEnd(String end) {
        this.end = end;
    }

    public String getJobId() {
        return jobId;
    }

    /**
     * The jobId for which to get the ScheduledEvents. When this option is used calendarId must be `_all`
     *
     * @param jobId The job for which to get the events.
     */
    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    /**
     * The paging parameters for the gathered ScheduledEvents
     *
     * @param pageParams The desired paging params
     */
    public void setPageParams(PageParams pageParams) {
        this.pageParams = pageParams;
    }

    @Override
    public ActionRequestValidationException validate() {
        // No client-side validation performed.
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Calendar.ID.getPreferredName(), calendarId);
        // Optional fields are emitted only when set.
        if (start != null) {
            builder.field(START.getPreferredName(), start);
        }
        if (end != null) {
            builder.field(END.getPreferredName(), end);
        }
        if (jobId != null) {
            builder.field(Job.ID.getPreferredName(), jobId);
        }
        if (pageParams != null) {
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        GetCalendarEventsRequest other = (GetCalendarEventsRequest) obj;
        return Objects.equals(calendarId, other.calendarId)
            && Objects.equals(start, other.start)
            && Objects.equals(end, other.end)
            && Objects.equals(jobId, other.jobId)
            && Objects.equals(pageParams, other.pageParams);
    }

    @Override
    public int hashCode() {
        return Objects.hash(calendarId, start, end, jobId, pageParams);
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Contains a {@link List} of the found {@link ScheduledEvent} objects and the total count found
 */
public class GetCalendarEventsResponse extends AbstractResultResponse<ScheduledEvent> {

    public static final ParseField RESULTS_FIELD = new ParseField("events");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<GetCalendarEventsResponse, Void> PARSER =
        new ConstructingObjectParser<>("calendar_events_response", true,
            a -> new GetCalendarEventsResponse((List<ScheduledEvent>) a[0], (long) a[1]));

    static {
        PARSER.declareObjectArray(constructorArg(), ScheduledEvent.PARSER, RESULTS_FIELD);
        PARSER.declareLong(constructorArg(), COUNT);
    }

    GetCalendarEventsResponse(List<ScheduledEvent> events, long count) {
        super(RESULTS_FIELD, events, count);
    }

    public static GetCalendarEventsResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * The collection of {@link ScheduledEvent} objects found in the query
     */
    public List<ScheduledEvent> events() {
        return results;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        GetCalendarEventsResponse other = (GetCalendarEventsResponse) obj;
        return count == other.count && Objects.equals(results, other.results);
    }

    @Override
    public int hashCode() {
        return Objects.hash(results, count);
    }

    @Override
    public final String toString() {
        return Strings.toString(this);
    }
}

View File

@ -0,0 +1,113 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* Request to add a ScheduledEvent to a Machine Learning calendar
*/
/**
 * Request to add a ScheduledEvent to a Machine Learning calendar
 */
public class PostCalendarEventRequest extends ActionRequest implements ToXContentObject {

    /** Serialization param key: when {@code false} the calendar id is omitted from the body. */
    public static final String INCLUDE_CALENDAR_ID_KEY = "include_calendar_id";
    public static final ParseField EVENTS = new ParseField("events");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<PostCalendarEventRequest, Void> PARSER =
        new ConstructingObjectParser<>("post_calendar_event_request",
            args -> new PostCalendarEventRequest((String) args[0], (List<ScheduledEvent>) args[1]));
    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID);
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
            (parser, context) -> ScheduledEvent.PARSER.apply(parser, null), EVENTS);
    }

    /** Params instance that suppresses the calendar id when rendering the request body. */
    public static final MapParams EXCLUDE_CALENDAR_ID_PARAMS =
        new MapParams(Collections.singletonMap(INCLUDE_CALENDAR_ID_KEY, Boolean.toString(false)));

    private final String calendarId;
    private final List<ScheduledEvent> scheduledEvents;

    /**
     * Create a new PostCalendarEventRequest with an existing non-null calendarId and a list of Scheduled events
     *
     * @param calendarId The ID of the calendar, must be non-null
     * @param scheduledEvents The non-null, non-empty, list of {@link ScheduledEvent} objects to add to the calendar
     */
    public PostCalendarEventRequest(String calendarId, List<ScheduledEvent> scheduledEvents) {
        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
        this.scheduledEvents = Objects.requireNonNull(scheduledEvents, "[events] must not be null.");
        if (scheduledEvents.isEmpty()) {
            throw new IllegalArgumentException("At least 1 event is required");
        }
    }

    /** @return the id of the calendar the events are added to */
    public String getCalendarId() {
        return calendarId;
    }

    /** @return the events to add to the calendar */
    public List<ScheduledEvent> getScheduledEvents() {
        return scheduledEvents;
    }

    @Override
    public ActionRequestValidationException validate() {
        // validation happened eagerly in the constructor
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        boolean includeCalendarId = params.paramAsBoolean(INCLUDE_CALENDAR_ID_KEY, true);
        if (includeCalendarId) {
            builder.field(Calendar.ID.getPreferredName(), calendarId);
        }
        builder.field(EVENTS.getPreferredName(), scheduledEvents);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(calendarId, scheduledEvents);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        PostCalendarEventRequest that = (PostCalendarEventRequest) obj;
        return Objects.equals(calendarId, that.calendarId)
            && Objects.equals(scheduledEvents, that.scheduledEvents);
    }
}

View File

@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
* Response to adding ScheduledEvent(s) to a Machine Learning calendar
*/
/**
 * Response to adding ScheduledEvent(s) to a Machine Learning calendar
 */
public class PostCalendarEventResponse extends ActionResponse implements ToXContentObject {

    public static final ParseField EVENTS = new ParseField("events");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<PostCalendarEventResponse, Void> PARSER =
        new ConstructingObjectParser<>("post_calendar_event_response",
            true,
            args -> new PostCalendarEventResponse((List<ScheduledEvent>) args[0]));
    static {
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
            (parser, context) -> ScheduledEvent.PARSER.apply(parser, null), EVENTS);
    }

    private final List<ScheduledEvent> scheduledEvents;

    /**
     * Parse a response from an XContent stream.
     *
     * @param parser the parser positioned at the response object
     * @return the parsed response
     * @throws IOException if parsing fails
     */
    public static PostCalendarEventResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * Create a new PostCalendarEventResponse containing the scheduled Events
     *
     * @param scheduledEvents The list of {@link ScheduledEvent} objects
     */
    public PostCalendarEventResponse(List<ScheduledEvent> scheduledEvents) {
        this.scheduledEvents = scheduledEvents;
    }

    /** @return the events that were added to the calendar */
    public List<ScheduledEvent> getScheduledEvents() {
        return scheduledEvents;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(EVENTS.getPreferredName(), scheduledEvents);
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(scheduledEvents);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        PostCalendarEventResponse that = (PostCalendarEventResponse) obj;
        return Objects.equals(scheduledEvents, that.scheduledEvents);
    }
}

View File

@ -0,0 +1,88 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import java.security.InvalidParameterException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* Request class for adding Machine Learning Jobs to an existing calendar
*/
/**
 * Request class for adding Machine Learning Jobs to an existing calendar
 */
public class PutCalendarJobRequest extends ActionRequest {

    private final String calendarId;
    private final List<String> jobIds;

    /**
     * Create a new request referencing an existing Calendar and which JobIds to add
     * to it.
     *
     * @param calendarId The non-null ID of the calendar
     * @param jobIds JobIds to add to the calendar, cannot be empty, or contain null values.
     *               It can be a list of jobs or groups.
     */
    public PutCalendarJobRequest(String calendarId, String... jobIds) {
        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
        if (jobIds.length == 0) {
            throw new InvalidParameterException("jobIds must not be empty.");
        }
        // reject null entries eagerly so the failure surfaces at construction time
        for (String jobId : jobIds) {
            if (jobId == null) {
                throw new NullPointerException("jobIds must not contain null values.");
            }
        }
        this.jobIds = Arrays.asList(jobIds);
    }

    /** @return the job (or group) ids being added to the calendar */
    public List<String> getJobIds() {
        return jobIds;
    }

    /** @return the id of the calendar being modified */
    public String getCalendarId() {
        return calendarId;
    }

    @Override
    public ActionRequestValidationException validate() {
        // all validation is performed in the constructor
        return null;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobIds, calendarId);
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        PutCalendarJobRequest request = (PutCalendarJobRequest) other;
        return Objects.equals(jobIds, request.jobIds)
            && Objects.equals(calendarId, request.calendarId);
    }
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
* A request to revert to a specific model snapshot for a given job
*/
/**
 * A request to revert to a specific model snapshot for a given job
 */
public class RevertModelSnapshotRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results");

    public static final ConstructingObjectParser<RevertModelSnapshotRequest, Void> PARSER = new ConstructingObjectParser<>(
        "revert_model_snapshots_request", args -> new RevertModelSnapshotRequest((String) args[0], (String) args[1]));
    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID);
        PARSER.declareBoolean(RevertModelSnapshotRequest::setDeleteInterveningResults, DELETE_INTERVENING);
    }

    private final String jobId;
    private final String snapshotId;
    // optional flag; omitted from the body when null so the server default applies
    private Boolean deleteInterveningResults;

    /**
     * Constructs a request to revert to a given model snapshot
     * @param jobId id of the job for which to revert the model snapshot
     * @param snapshotId id of the snapshot to which to revert
     */
    public RevertModelSnapshotRequest(String jobId, String snapshotId) {
        this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null");
        this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null");
    }

    /** @return the id of the job whose snapshot is reverted */
    public String getJobId() {
        return jobId;
    }

    /** @return the id of the snapshot to revert to */
    public String getSnapshotId() {
        return snapshotId;
    }

    /** @return the flag, or null if it has not been set */
    public Boolean getDeleteInterveningResults() {
        return deleteInterveningResults;
    }

    /**
     * Sets the request flag that indicates whether or not intervening results should be deleted.
     * @param deleteInterveningResults Flag that indicates whether or not intervening results should be deleted.
     */
    public void setDeleteInterveningResults(Boolean deleteInterveningResults) {
        this.deleteInterveningResults = deleteInterveningResults;
    }

    @Override
    public ActionRequestValidationException validate() {
        // required arguments are enforced in the constructor
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId);
        if (deleteInterveningResults != null) {
            builder.field(DELETE_INTERVENING.getPreferredName(), deleteInterveningResults);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        RevertModelSnapshotRequest that = (RevertModelSnapshotRequest) obj;
        return Objects.equals(jobId, that.jobId)
            && Objects.equals(snapshotId, that.snapshotId)
            && Objects.equals(deleteInterveningResults, that.deleteInterveningResults);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, snapshotId, deleteInterveningResults);
    }
}

View File

@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* A response containing the reverted model snapshot
*/
/**
 * A response containing the reverted model snapshot
 */
public class RevertModelSnapshotResponse extends ActionResponse implements ToXContentObject {

    private static final ParseField MODEL = new ParseField("model");

    public static final ConstructingObjectParser<RevertModelSnapshotResponse, Void> PARSER =
        new ConstructingObjectParser<>("revert_model_snapshot_response", true,
            args -> new RevertModelSnapshotResponse((ModelSnapshot.Builder) args[0]));
    static {
        PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL);
    }

    private final ModelSnapshot model;

    /**
     * Parse a response from an XContent stream.
     *
     * @param parser the parser positioned at the response object
     * @return the parsed response
     * @throws IOException if parsing fails
     */
    public static RevertModelSnapshotResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    public RevertModelSnapshotResponse(ModelSnapshot.Builder modelSnapshot) {
        this.model = modelSnapshot.build();
    }

    /**
     * Get full information about the reverted model snapshot
     * @return the reverted model snapshot.
     */
    public ModelSnapshot getModel() {
        return model;
    }

    @Override
    public int hashCode() {
        return Objects.hash(model);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        RevertModelSnapshotResponse that = (RevertModelSnapshotResponse) obj;
        return Objects.equals(model, that.model);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (model != null) {
            builder.field(MODEL.getPreferredName(), model);
        }
        builder.endObject();
        return builder;
    }
}

View File

@ -0,0 +1,135 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
* A request to update information about an existing model snapshot for a given job
*/
/**
 * A request to update information about an existing model snapshot for a given job
 */
public class UpdateModelSnapshotRequest extends ActionRequest implements ToXContentObject {

    public static final ConstructingObjectParser<UpdateModelSnapshotRequest, Void> PARSER = new ConstructingObjectParser<>(
        "update_model_snapshot_request", a -> new UpdateModelSnapshotRequest((String) a[0], (String) a[1]));
    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID);
        PARSER.declareStringOrNull(UpdateModelSnapshotRequest::setDescription, ModelSnapshot.DESCRIPTION);
        PARSER.declareBoolean(UpdateModelSnapshotRequest::setRetain, ModelSnapshot.RETAIN);
    }

    private final String jobId;
    // final: assigned only in the constructor and never mutated, consistent with jobId
    // and with RevertModelSnapshotRequest where both ids are final
    private final String snapshotId;
    // optional update fields; omitted from the body when null
    private String description;
    private Boolean retain;

    /**
     * Constructs a request to update information for a snapshot of given job
     * @param jobId id of the job from which to retrieve results
     * @param snapshotId id of the snapshot from which to retrieve results
     */
    public UpdateModelSnapshotRequest(String jobId, String snapshotId) {
        this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null");
        this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null");
    }

    /** @return the id of the job the snapshot belongs to */
    public String getJobId() {
        return jobId;
    }

    /** @return the id of the snapshot being updated */
    public String getSnapshotId() {
        return snapshotId;
    }

    /** @return the new description, or null if it is not being updated */
    public String getDescription() {
        return description;
    }

    /**
     * The new description of the snapshot.
     * @param description the updated snapshot description
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the new retain flag, or null if it is not being updated */
    public Boolean getRetain() {
        return retain;
    }

    /**
     * The new value of the "retain" property of the snapshot
     * @param retain the updated retain property
     */
    public void setRetain(boolean retain) {
        this.retain = retain;
    }

    @Override
    public ActionRequestValidationException validate() {
        // required arguments are enforced in the constructor
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId);
        if (description != null) {
            builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description);
        }
        if (retain != null) {
            builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        UpdateModelSnapshotRequest request = (UpdateModelSnapshotRequest) obj;
        return Objects.equals(jobId, request.jobId)
            && Objects.equals(snapshotId, request.snapshotId)
            && Objects.equals(description, request.description)
            && Objects.equals(retain, request.retain);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, snapshotId, description, retain);
    }
}

View File

@ -0,0 +1,109 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* A response acknowledging the update of information for an existing model snapshot for a given job
*/
/**
 * A response acknowledging the update of information for an existing model snapshot for a given job
 */
public class UpdateModelSnapshotResponse extends ActionResponse implements ToXContentObject {

    private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
    private static final ParseField MODEL = new ParseField("model");

    public static final ConstructingObjectParser<UpdateModelSnapshotResponse, Void> PARSER =
        new ConstructingObjectParser<>("update_model_snapshot_response", true,
            args -> new UpdateModelSnapshotResponse((Boolean) args[0], ((ModelSnapshot.Builder) args[1])));
    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED);
        PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL);
    }

    private final Boolean acknowledged;
    private final ModelSnapshot model;

    public UpdateModelSnapshotResponse(boolean acknowledged, ModelSnapshot.Builder modelSnapshot) {
        this.acknowledged = acknowledged;
        this.model = modelSnapshot.build();
    }

    /**
     * Parse a response from an XContent stream.
     *
     * @param parser the parser positioned at the response object
     * @return the parsed response
     * @throws IOException if parsing fails
     */
    public static UpdateModelSnapshotResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * Get the action acknowledgement
     * @return a {@code boolean} that indicates whether the model snapshot was updated successfully.
     */
    public Boolean getAcknowledged() {
        return acknowledged;
    }

    /**
     * Get the updated snapshot of the model
     * @return the updated model snapshot.
     */
    public ModelSnapshot getModel() {
        return model;
    }

    @Override
    public int hashCode() {
        return Objects.hash(acknowledged, model);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        if (acknowledged != null) {
            builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged);
        }
        if (model != null) {
            builder.field(MODEL.getPreferredName(), model);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        UpdateModelSnapshotResponse that = (UpdateModelSnapshotResponse) obj;
        return Objects.equals(acknowledged, that.acknowledged)
            && Objects.equals(model, that.model);
    }
}

View File

@ -62,6 +62,7 @@ public class DatafeedConfig implements ToXContentObject {
public static final ParseField AGGREGATIONS = new ParseField("aggregations");
public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields");
public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config");
public static final ParseField DELAYED_DATA_CHECK_CONFIG = new ParseField("delayed_data_check_config");
public static final ConstructingObjectParser<Builder, Void> PARSER = new ConstructingObjectParser<>(
"datafeed_config", true, a -> new Builder((String)a[0], (String)a[1]));
@ -88,6 +89,7 @@ public class DatafeedConfig implements ToXContentObject {
}, SCRIPT_FIELDS);
PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE);
PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG);
PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DELAYED_DATA_CHECK_CONFIG);
}
private static BytesReference parseBytes(XContentParser parser) throws IOException {
@ -107,10 +109,12 @@ public class DatafeedConfig implements ToXContentObject {
private final List<SearchSourceBuilder.ScriptField> scriptFields;
private final Integer scrollSize;
private final ChunkingConfig chunkingConfig;
private final DelayedDataCheckConfig delayedDataCheckConfig;
private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
BytesReference query, BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, ChunkingConfig chunkingConfig) {
Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
this.id = id;
this.jobId = jobId;
this.queryDelay = queryDelay;
@ -122,6 +126,7 @@ public class DatafeedConfig implements ToXContentObject {
this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields);
this.scrollSize = scrollSize;
this.chunkingConfig = chunkingConfig;
this.delayedDataCheckConfig = delayedDataCheckConfig;
}
public String getId() {
@ -168,6 +173,10 @@ public class DatafeedConfig implements ToXContentObject {
return chunkingConfig;
}
public DelayedDataCheckConfig getDelayedDataCheckConfig() {
return delayedDataCheckConfig;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@ -204,6 +213,9 @@ public class DatafeedConfig implements ToXContentObject {
if (chunkingConfig != null) {
builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig);
}
if (delayedDataCheckConfig != null) {
builder.field(DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig);
}
builder.endObject();
return builder;
@ -244,7 +256,8 @@ public class DatafeedConfig implements ToXContentObject {
&& Objects.equals(this.scrollSize, that.scrollSize)
&& Objects.equals(asMap(this.aggregations), asMap(that.aggregations))
&& Objects.equals(this.scriptFields, that.scriptFields)
&& Objects.equals(this.chunkingConfig, that.chunkingConfig);
&& Objects.equals(this.chunkingConfig, that.chunkingConfig)
&& Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig);
}
/**
@ -255,7 +268,7 @@ public class DatafeedConfig implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields,
chunkingConfig);
chunkingConfig, delayedDataCheckConfig);
}
public static Builder builder(String id, String jobId) {
@ -275,6 +288,7 @@ public class DatafeedConfig implements ToXContentObject {
private List<SearchSourceBuilder.ScriptField> scriptFields;
private Integer scrollSize;
private ChunkingConfig chunkingConfig;
private DelayedDataCheckConfig delayedDataCheckConfig;
public Builder(String id, String jobId) {
this.id = Objects.requireNonNull(id, ID.getPreferredName());
@ -293,6 +307,7 @@ public class DatafeedConfig implements ToXContentObject {
this.scriptFields = config.scriptFields;
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
this.delayedDataCheckConfig = config.getDelayedDataCheckConfig();
}
public Builder setIndices(List<String> indices) {
@ -366,9 +381,23 @@ public class DatafeedConfig implements ToXContentObject {
return this;
}
/**
* This sets the {@link DelayedDataCheckConfig} settings.
*
* See {@link DelayedDataCheckConfig} for more information.
*
* @param delayedDataCheckConfig the delayed data check configuration
* Default value is enabled, with `check_window` being null. This means the true window is
* calculated when the real-time Datafeed runs.
*/
public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) {
this.delayedDataCheckConfig = delayedDataCheckConfig;
return this;
}
public DatafeedConfig build() {
return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
chunkingConfig);
chunkingConfig, delayedDataCheckConfig);
}
private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {

View File

@ -77,6 +77,9 @@ public class DatafeedUpdate implements ToXContentObject {
}, DatafeedConfig.SCRIPT_FIELDS);
PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE);
PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG);
PARSER.declareObject(Builder::setDelayedDataCheckConfig,
DelayedDataCheckConfig.PARSER,
DatafeedConfig.DELAYED_DATA_CHECK_CONFIG);
}
private static BytesReference parseBytes(XContentParser parser) throws IOException {
@ -96,10 +99,11 @@ public class DatafeedUpdate implements ToXContentObject {
private final List<SearchSourceBuilder.ScriptField> scriptFields;
private final Integer scrollSize;
private final ChunkingConfig chunkingConfig;
private final DelayedDataCheckConfig delayedDataCheckConfig;
private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
BytesReference query, BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, ChunkingConfig chunkingConfig) {
Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
this.id = id;
this.jobId = jobId;
this.queryDelay = queryDelay;
@ -111,6 +115,7 @@ public class DatafeedUpdate implements ToXContentObject {
this.scriptFields = scriptFields;
this.scrollSize = scrollSize;
this.chunkingConfig = chunkingConfig;
this.delayedDataCheckConfig = delayedDataCheckConfig;
}
/**
@ -146,6 +151,9 @@ public class DatafeedUpdate implements ToXContentObject {
}
builder.endObject();
}
if (delayedDataCheckConfig != null) {
builder.field(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig);
}
addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize);
addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig);
builder.endObject();
@ -198,6 +206,10 @@ public class DatafeedUpdate implements ToXContentObject {
return chunkingConfig;
}
public DelayedDataCheckConfig getDelayedDataCheckConfig() {
return delayedDataCheckConfig;
}
private static Map<String, Object> asMap(BytesReference bytesReference) {
return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2();
}
@ -232,6 +244,7 @@ public class DatafeedUpdate implements ToXContentObject {
&& Objects.equals(asMap(this.query), asMap(that.query))
&& Objects.equals(this.scrollSize, that.scrollSize)
&& Objects.equals(asMap(this.aggregations), asMap(that.aggregations))
&& Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig)
&& Objects.equals(this.scriptFields, that.scriptFields)
&& Objects.equals(this.chunkingConfig, that.chunkingConfig);
}
@ -244,7 +257,7 @@ public class DatafeedUpdate implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields,
chunkingConfig);
chunkingConfig, delayedDataCheckConfig);
}
public static Builder builder(String id) {
@ -264,6 +277,7 @@ public class DatafeedUpdate implements ToXContentObject {
private List<SearchSourceBuilder.ScriptField> scriptFields;
private Integer scrollSize;
private ChunkingConfig chunkingConfig;
private DelayedDataCheckConfig delayedDataCheckConfig;
public Builder(String id) {
this.id = Objects.requireNonNull(id, DatafeedConfig.ID.getPreferredName());
@ -281,6 +295,7 @@ public class DatafeedUpdate implements ToXContentObject {
this.scriptFields = config.scriptFields;
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
this.delayedDataCheckConfig = config.delayedDataCheckConfig;
}
public Builder setJobId(String jobId) {
@ -359,9 +374,14 @@ public class DatafeedUpdate implements ToXContentObject {
return this;
}
public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) {
this.delayedDataCheckConfig = delayedDataCheckConfig;
return this;
}
public DatafeedUpdate build() {
return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
chunkingConfig);
chunkingConfig, delayedDataCheckConfig);
}
private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
 * The configuration object containing the delayed data check settings.
 *
 * Use {@link DelayedDataCheckConfig#enabledDelayedDataCheckConfig(TimeValue)} to create an
 * enabled check with a given {@code check_window}, and
 * {@link DelayedDataCheckConfig#disabledDelayedDataCheckConfig()} to create a config that
 * disables delayed data checking.
 */
public class DelayedDataCheckConfig implements ToXContentObject {

    public static final ParseField ENABLED = new ParseField("enabled");
    public static final ParseField CHECK_WINDOW = new ParseField("check_window");

    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
    public static final ConstructingObjectParser<DelayedDataCheckConfig, Void> PARSER = new ConstructingObjectParser<>(
        "delayed_data_check_config", true, args -> new DelayedDataCheckConfig((Boolean) args[0], (TimeValue) args[1]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED);
        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), parser -> {
            // Only the string form of a time value (e.g. "2h") is accepted for check_window.
            if (parser.currentToken() != XContentParser.Token.VALUE_STRING) {
                throw new IllegalArgumentException("Unsupported token [" + parser.currentToken() + "]");
            }
            return TimeValue.parseTimeValue(parser.text(), CHECK_WINDOW.getPreferredName());
        }, CHECK_WINDOW, ObjectParser.ValueType.STRING);
    }

    /**
     * Creates a new, enabled DelayedDataCheckConfig with the given {@code check_window}.
     *
     * We query the index to the latest finalized bucket from this TimeValue in the past,
     * looking to see if any data has been indexed since the data was read with the Datafeed.
     *
     * The window must be larger than the {@link org.elasticsearch.client.ml.job.config.AnalysisConfig#bucketSpan},
     * less than 24 hours, and span less than 10,000x buckets.
     *
     * @param timeValue how far back from the latest finalized bucket to look for latent data;
     *                  when {@code null}, an appropriate window is calculated at use time
     */
    public static DelayedDataCheckConfig enabledDelayedDataCheckConfig(TimeValue timeValue) {
        return new DelayedDataCheckConfig(true, timeValue);
    }

    /**
     * Creates a new DelayedDataCheckConfig with delayed data checking disabled.
     */
    public static DelayedDataCheckConfig disabledDelayedDataCheckConfig() {
        return new DelayedDataCheckConfig(false, null);
    }

    private final boolean enabled;
    private final TimeValue checkWindow;

    DelayedDataCheckConfig(Boolean enabled, TimeValue checkWindow) {
        this.enabled = enabled;
        this.checkWindow = checkWindow;
    }

    /** @return whether the delayed data check is enabled */
    public boolean isEnabled() {
        return enabled;
    }

    /** @return the configured check window, or {@code null} when unset/auto-calculated */
    @Nullable
    public TimeValue getCheckWindow() {
        return checkWindow;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ENABLED.getPreferredName(), enabled);
        if (checkWindow != null) {
            // Serialize the window in its string representation (e.g. "2h").
            builder.field(CHECK_WINDOW.getPreferredName(), checkWindow.getStringRep());
        }
        return builder.endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(enabled, checkWindow);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        DelayedDataCheckConfig that = (DelayedDataCheckConfig) obj;
        return this.enabled == that.enabled && Objects.equals(this.checkWindow, that.checkWindow);
    }
}

View File

@ -60,8 +60,6 @@ public class AnalysisConfig implements ToXContentObject {
public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
public static final ParseField DETECTORS = new ParseField("detectors");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets");
public static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window");
public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");
@SuppressWarnings("unchecked")
@ -84,8 +82,6 @@ public class AnalysisConfig implements ToXContentObject {
builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY);
PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS);
PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
}
@ -100,14 +96,11 @@ public class AnalysisConfig implements ToXContentObject {
private final String summaryCountFieldName;
private final List<Detector> detectors;
private final List<String> influencers;
private final Boolean overlappingBuckets;
private final Long resultFinalizationWindow;
private final Boolean multivariateByFields;
private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, List<String> categorizationFilters,
CategorizationAnalyzerConfig categorizationAnalyzerConfig, TimeValue latency, String summaryCountFieldName,
List<Detector> detectors, List<String> influencers, Boolean overlappingBuckets, Long resultFinalizationWindow,
Boolean multivariateByFields) {
List<Detector> detectors, List<String> influencers, Boolean multivariateByFields) {
this.detectors = Collections.unmodifiableList(detectors);
this.bucketSpan = bucketSpan;
this.latency = latency;
@ -116,8 +109,6 @@ public class AnalysisConfig implements ToXContentObject {
this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters);
this.summaryCountFieldName = summaryCountFieldName;
this.influencers = Collections.unmodifiableList(influencers);
this.overlappingBuckets = overlappingBuckets;
this.resultFinalizationWindow = resultFinalizationWindow;
this.multivariateByFields = multivariateByFields;
}
@ -177,14 +168,6 @@ public class AnalysisConfig implements ToXContentObject {
return influencers;
}
public Boolean getOverlappingBuckets() {
return overlappingBuckets;
}
public Long getResultFinalizationWindow() {
return resultFinalizationWindow;
}
public Boolean getMultivariateByFields() {
return multivariateByFields;
}
@ -255,12 +238,6 @@ public class AnalysisConfig implements ToXContentObject {
}
builder.endArray();
builder.field(INFLUENCERS.getPreferredName(), influencers);
if (overlappingBuckets != null) {
builder.field(OVERLAPPING_BUCKETS.getPreferredName(), overlappingBuckets);
}
if (resultFinalizationWindow != null) {
builder.field(RESULT_FINALIZATION_WINDOW.getPreferredName(), resultFinalizationWindow);
}
if (multivariateByFields != null) {
builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields);
}
@ -287,8 +264,6 @@ public class AnalysisConfig implements ToXContentObject {
Objects.equals(summaryCountFieldName, that.summaryCountFieldName) &&
Objects.equals(detectors, that.detectors) &&
Objects.equals(influencers, that.influencers) &&
Objects.equals(overlappingBuckets, that.overlappingBuckets) &&
Objects.equals(resultFinalizationWindow, that.resultFinalizationWindow) &&
Objects.equals(multivariateByFields, that.multivariateByFields);
}
@ -296,8 +271,7 @@ public class AnalysisConfig implements ToXContentObject {
public int hashCode() {
return Objects.hash(
bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, latency,
summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow,
multivariateByFields);
summaryCountFieldName, detectors, influencers, multivariateByFields);
}
public static Builder builder(List<Detector> detectors) {
@ -314,8 +288,6 @@ public class AnalysisConfig implements ToXContentObject {
private CategorizationAnalyzerConfig categorizationAnalyzerConfig;
private String summaryCountFieldName;
private List<String> influencers = new ArrayList<>();
private Boolean overlappingBuckets;
private Long resultFinalizationWindow;
private Boolean multivariateByFields;
public Builder(List<Detector> detectors) {
@ -332,8 +304,6 @@ public class AnalysisConfig implements ToXContentObject {
this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig;
this.summaryCountFieldName = analysisConfig.summaryCountFieldName;
this.influencers = new ArrayList<>(analysisConfig.influencers);
this.overlappingBuckets = analysisConfig.overlappingBuckets;
this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow;
this.multivariateByFields = analysisConfig.multivariateByFields;
}
@ -391,16 +361,6 @@ public class AnalysisConfig implements ToXContentObject {
return this;
}
public Builder setOverlappingBuckets(Boolean overlappingBuckets) {
this.overlappingBuckets = overlappingBuckets;
return this;
}
public Builder setResultFinalizationWindow(Long resultFinalizationWindow) {
this.resultFinalizationWindow = resultFinalizationWindow;
return this;
}
public Builder setMultivariateByFields(Boolean multivariateByFields) {
this.multivariateByFields = multivariateByFields;
return this;
@ -409,8 +369,7 @@ public class AnalysisConfig implements ToXContentObject {
public AnalysisConfig build() {
return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig,
latency, summaryCountFieldName, detectors, influencers, overlappingBuckets,
resultFinalizationWindow, multivariateByFields);
latency, summaryCountFieldName, detectors, influencers, multivariateByFields);
}
}
}

View File

@ -176,6 +176,10 @@ public class ModelSnapshot implements ToXContentObject {
return quantiles;
}
/**
 * @return the value of the snapshot's {@code retain} flag.
 *         NOTE(review): presumably marks the snapshot as protected from automatic
 *         cleanup — confirm against the server-side ModelSnapshot documentation.
 */
public boolean getRetain() {
return retain;
}
/**
 * @return the timestamp of the latest record seen by this snapshot, as stored.
 *         NOTE(review): the internal {@link Date} is returned directly and Date is
 *         mutable — callers must not modify the returned object.
 */
public Date getLatestRecordTimeStamp() {
return latestRecordTimeStamp;
}

View File

@ -19,12 +19,15 @@
package org.elasticsearch.client.rollup;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Objects;
public class StopRollupJobRequest implements Validatable {
private final String jobId;
private TimeValue timeout;
private Boolean waitForCompletion;
public StopRollupJobRequest(final String jobId) {
this.jobId = Objects.requireNonNull(jobId, "id parameter must not be null");
@ -46,4 +49,26 @@ public class StopRollupJobRequest implements Validatable {
// NOTE(review): the hash is based on jobId only; the optional timeout and
// waitForCompletion parameters are excluded — verify that equals() uses the
// same fields so the equals/hashCode contract holds.
public int hashCode() {
return Objects.hash(jobId);
}
/**
 * Sets the request's optional "timeout" parameter: how long to wait for the
 * stop request to complete before the call returns.
 */
public void timeout(TimeValue timeout) {
this.timeout = timeout;
}
/** @return the configured timeout, or {@code null} when unset */
public TimeValue timeout() {
return this.timeout;
}
/**
 * Sets the request's optional "wait_for_completion" parameter: whether the call
 * should block until the rollup job has fully stopped.
 */
public void waitForCompletion(boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
}
/** @return the configured wait_for_completion flag, or {@code null} when unset */
public Boolean waitForCompletion() {
return this.waitForCompletion;
}
}

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges;
import org.elasticsearch.client.security.user.privileges.IndicesPrivileges;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
import java.util.Set;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
/**
 * Request to determine whether the current user has a list of privileges.
 * At least one cluster, index, or application privilege must be supplied.
 */
public final class HasPrivilegesRequest implements Validatable, ToXContentObject {

    private final Set<String> clusterPrivileges;
    private final Set<IndicesPrivileges> indexPrivileges;
    private final Set<ApplicationResourcePrivileges> applicationPrivileges;

    /**
     * Builds the request. Any argument may be {@code null}, which is treated as the
     * empty set, but at least one of the three sets must contain a privilege.
     *
     * NOTE: the supplied sets are wrapped, not copied — later caller-side mutation
     * would be visible through this request.
     *
     * @param clusterPrivileges     cluster privileges to check, or {@code null}
     * @param indexPrivileges       index privileges to check, or {@code null}
     * @param applicationPrivileges application privileges to check, or {@code null}
     * @throws IllegalArgumentException if no privilege at all was supplied
     */
    public HasPrivilegesRequest(@Nullable Set<String> clusterPrivileges,
                                @Nullable Set<IndicesPrivileges> indexPrivileges,
                                @Nullable Set<ApplicationResourcePrivileges> applicationPrivileges) {
        this.clusterPrivileges = clusterPrivileges == null ? emptySet() : unmodifiableSet(clusterPrivileges);
        this.indexPrivileges = indexPrivileges == null ? emptySet() : unmodifiableSet(indexPrivileges);
        this.applicationPrivileges = applicationPrivileges == null ? emptySet() : unmodifiableSet(applicationPrivileges);
        if (this.clusterPrivileges.isEmpty() && this.indexPrivileges.isEmpty() && this.applicationPrivileges.isEmpty()) {
            // Fix: message previously read "At last 1 privilege must be specified".
            throw new IllegalArgumentException("At least 1 privilege must be specified");
        }
    }

    /** @return the requested cluster privileges (unmodifiable, never {@code null}) */
    public Set<String> getClusterPrivileges() {
        return clusterPrivileges;
    }

    /** @return the requested index privileges (unmodifiable, never {@code null}) */
    public Set<IndicesPrivileges> getIndexPrivileges() {
        return indexPrivileges;
    }

    /** @return the requested application privileges (unmodifiable, never {@code null}) */
    public Set<ApplicationResourcePrivileges> getApplicationPrivileges() {
        return applicationPrivileges;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject()
            .field("cluster", clusterPrivileges)
            .field("index", indexPrivileges)
            .field("application", applicationPrivileges)
            .endObject();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final HasPrivilegesRequest that = (HasPrivilegesRequest) o;
        return Objects.equals(clusterPrivileges, that.clusterPrivileges) &&
            Objects.equals(indexPrivileges, that.indexPrivileges) &&
            Objects.equals(applicationPrivileges, that.applicationPrivileges);
    }

    @Override
    public int hashCode() {
        return Objects.hash(clusterPrivileges, indexPrivileges, applicationPrivileges);
    }
}

View File

@ -0,0 +1,252 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiConsumer;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
 * Response when checking whether the current user has a defined set of privileges.
 */
public final class HasPrivilegesResponse {
    private static final ConstructingObjectParser<HasPrivilegesResponse, Void> PARSER = new ConstructingObjectParser<>(
        "has_privileges_response", true, args -> new HasPrivilegesResponse(
        (String) args[0], (Boolean) args[1], checkMap(args[2], 0), checkMap(args[3], 1), checkMap(args[4], 2)));

    static {
        PARSER.declareString(constructorArg(), new ParseField("username"));
        PARSER.declareBoolean(constructorArg(), new ParseField("has_all_requested"));
        declareMap(constructorArg(), "cluster");
        declareMap(constructorArg(), "index");
        declareMap(constructorArg(), "application");
    }

    /**
     * Validates that {@code argument} is a map nested {@code depth} levels above its
     * Boolean leaves (depth 0 = values are Booleans directly).
     *
     * @throws IllegalArgumentException if {@code argument} is not a Map, or a leaf
     *                                  value is not a Boolean
     */
    @SuppressWarnings("unchecked")
    private static <T> Map<String, T> checkMap(Object argument, int depth) {
        if (argument instanceof Map) {
            Map<String, T> map = (Map<String, T>) argument;
            if (depth == 0) {
                map.values().stream()
                    .filter(val -> (val instanceof Boolean) == false)
                    .forEach(val -> {
                        throw new IllegalArgumentException("Map value [" + val + "] in [" + map + "] is not a Boolean");
                    });
            } else {
                // Fix: dropped redundant .stream() before forEach.
                map.values().forEach(val -> checkMap(val, depth - 1));
            }
            return map;
        }
        // Fix: message previously said "is not an Object" although the check is for a Map.
        throw new IllegalArgumentException("Value [" + argument + "] is not a Map");
    }

    // Registers a lenient object-valued field parsed as a raw map.
    private static void declareMap(BiConsumer<HasPrivilegesResponse, Map<String, Object>> arg, String name) {
        PARSER.declareField(arg, XContentParser::map, new ParseField(name), ObjectParser.ValueType.OBJECT);
    }

    private final String username;
    private final boolean hasAllRequested;
    private final Map<String, Boolean> clusterPrivileges;
    private final Map<String, Map<String, Boolean>> indexPrivileges;
    private final Map<String, Map<String, Map<String, Boolean>>> applicationPrivileges;

    public HasPrivilegesResponse(String username, boolean hasAllRequested,
                                 Map<String, Boolean> clusterPrivileges,
                                 Map<String, Map<String, Boolean>> indexPrivileges,
                                 Map<String, Map<String, Map<String, Boolean>>> applicationPrivileges) {
        this.username = username;
        this.hasAllRequested = hasAllRequested;
        this.clusterPrivileges = Collections.unmodifiableMap(clusterPrivileges);
        this.indexPrivileges = unmodifiableMap2(indexPrivileges);
        this.applicationPrivileges = unmodifiableMap3(applicationPrivileges);
    }

    /** Deep unmodifiable view of a two-level map. */
    private static Map<String, Map<String, Boolean>> unmodifiableMap2(final Map<String, Map<String, Boolean>> map) {
        final Map<String, Map<String, Boolean>> copy = new HashMap<>(map);
        copy.replaceAll((k, v) -> Collections.unmodifiableMap(v));
        return Collections.unmodifiableMap(copy);
    }

    /** Deep unmodifiable view of a three-level map. */
    private static Map<String, Map<String, Map<String, Boolean>>> unmodifiableMap3(
        final Map<String, Map<String, Map<String, Boolean>>> map) {
        final Map<String, Map<String, Map<String, Boolean>>> copy = new HashMap<>(map);
        copy.replaceAll((k, v) -> unmodifiableMap2(v));
        return Collections.unmodifiableMap(copy);
    }

    public static HasPrivilegesResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    /**
     * The username (principal) of the user for which the privileges check was executed.
     */
    public String getUsername() {
        return username;
    }

    /**
     * {@code true} if the user has every privilege that was checked. Otherwise {@code false}.
     */
    public boolean hasAllRequested() {
        return hasAllRequested;
    }

    /**
     * @param clusterPrivilegeName The name of a cluster privilege. This privilege must have been specified (verbatim) in the
     *                             {@link HasPrivilegesRequest#getClusterPrivileges() cluster privileges of the request}.
     * @return {@code true} if the user has the specified cluster privilege. {@code false} if the privilege was checked
     * but it has not been granted to the user.
     * @throws IllegalArgumentException if the response did not include a value for the specified privilege name.
     *                                  The response only includes values for privileges that were
     *                                  {@link HasPrivilegesRequest#getClusterPrivileges() included in the request}.
     */
    public boolean hasClusterPrivilege(String clusterPrivilegeName) throws IllegalArgumentException {
        Boolean has = clusterPrivileges.get(clusterPrivilegeName);
        if (has == null) {
            throw new IllegalArgumentException("Cluster privilege [" + clusterPrivilegeName + "] was not included in this response");
        }
        return has;
    }

    /**
     * @param indexName     The name of the index to check. This index must have been specified (verbatim) in the
     *                      {@link HasPrivilegesRequest#getIndexPrivileges() requested index privileges}.
     * @param privilegeName The name of the index privilege to check. This privilege must have been specified (verbatim), for the
     *                      given index, in the {@link HasPrivilegesRequest#getIndexPrivileges() requested index privileges}.
     * @return {@code true} if the user has the specified privilege on the specified index. {@code false} if the privilege was checked
     * for that index and was not granted to the user.
     * @throws IllegalArgumentException if the response did not include a value for the specified index and privilege name pair.
     *                                  The response only includes values for indices and privileges that were
     *                                  {@link HasPrivilegesRequest#getIndexPrivileges() included in the request}.
     */
    public boolean hasIndexPrivilege(String indexName, String privilegeName) {
        Map<String, Boolean> indexPrivileges = this.indexPrivileges.get(indexName);
        if (indexPrivileges == null) {
            throw new IllegalArgumentException("No privileges for index [" + indexName + "] were included in this response");
        }
        Boolean has = indexPrivileges.get(privilegeName);
        if (has == null) {
            throw new IllegalArgumentException("Privilege [" + privilegeName + "] was not included in the response for index ["
                + indexName + "]");
        }
        return has;
    }

    /**
     * @param applicationName The name of the application to check. This application must have been specified (verbatim) in the
     *                        {@link HasPrivilegesRequest#getApplicationPrivileges() requested application privileges}.
     * @param resourceName    The name of the resource to check. This resource must have been specified (verbatim), for the given
     *                        application in the {@link HasPrivilegesRequest#getApplicationPrivileges() requested application privileges}.
     * @param privilegeName   The name of the privilege to check. This privilege must have been specified (verbatim), for the given
     *                        application and resource, in the
     *                        {@link HasPrivilegesRequest#getApplicationPrivileges() requested application privileges}.
     * @return {@code true} if the user has the specified privilege on the specified resource in the specified application.
     * {@code false} if the privilege was checked for that application and resource, but was not granted to the user.
     * @throws IllegalArgumentException if the response did not include a value for the specified application, resource and privilege
     *                                  triplet. The response only includes values for applications, resources and privileges that were
     *                                  {@link HasPrivilegesRequest#getApplicationPrivileges() included in the request}.
     */
    public boolean hasApplicationPrivilege(String applicationName, String resourceName, String privilegeName) {
        final Map<String, Map<String, Boolean>> appPrivileges = this.applicationPrivileges.get(applicationName);
        if (appPrivileges == null) {
            throw new IllegalArgumentException("No privileges for application [" + applicationName + "] were included in this response");
        }
        final Map<String, Boolean> resourcePrivileges = appPrivileges.get(resourceName);
        if (resourcePrivileges == null) {
            throw new IllegalArgumentException("No privileges for resource [" + resourceName +
                "] were included in the response for application [" + applicationName + "]");
        }
        Boolean has = resourcePrivileges.get(privilegeName);
        if (has == null) {
            throw new IllegalArgumentException("Privilege [" + privilegeName + "] was not included in the response for application [" +
                applicationName + "] and resource [" + resourceName + "]");
        }
        return has;
    }

    /**
     * A {@code Map} from cluster-privilege-name to access. Each requested privilege is included as a key in the map, and the
     * associated value indicates whether the user was granted that privilege.
     * <p>
     * The {@link #hasClusterPrivilege} method should be used in preference to directly accessing this map.
     * </p>
     */
    public Map<String, Boolean> getClusterPrivileges() {
        return clusterPrivileges;
    }

    /**
     * A {@code Map} from index-name + privilege-name to access. Each requested index is a key in the outer map.
     * Each requested privilege is a key in the inner map. The inner most {@code Boolean} value indicates whether
     * the user was granted that privilege on that index.
     * <p>
     * The {@link #hasIndexPrivilege} method should be used in preference to directly accessing this map.
     * </p>
     */
    public Map<String, Map<String, Boolean>> getIndexPrivileges() {
        return indexPrivileges;
    }

    /**
     * A {@code Map} from application-name + resource-name + privilege-name to access. Each requested application is a key in the
     * outer-most map. Each requested resource is a key in the next-level map. The requested privileges form the keys in the inner-most map.
     * The {@code Boolean} value indicates whether the user was granted that privilege on that resource within that application.
     * <p>
     * The {@link #hasApplicationPrivilege} method should be used in preference to directly accessing this map.
     * </p>
     */
    public Map<String, Map<String, Map<String, Boolean>>> getApplicationPrivileges() {
        return applicationPrivileges;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || this.getClass() != o.getClass()) {
            return false;
        }
        final HasPrivilegesResponse that = (HasPrivilegesResponse) o;
        return this.hasAllRequested == that.hasAllRequested &&
            Objects.equals(this.username, that.username) &&
            Objects.equals(this.clusterPrivileges, that.clusterPrivileges) &&
            Objects.equals(this.indexPrivileges, that.indexPrivileges) &&
            Objects.equals(this.applicationPrivileges, that.applicationPrivileges);
    }

    @Override
    public int hashCode() {
        return Objects.hash(username, hasAllRequested, clusterPrivileges, indexPrivileges, applicationPrivileges);
    }
}

View File

@ -18,20 +18,15 @@
*/
package org.elasticsearch.client.xpack;
import org.elasticsearch.client.license.LicenseStatus;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.client.license.LicenseStatus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -39,12 +34,11 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class XPackInfoResponse implements ToXContentObject {
public class XPackInfoResponse {
/**
* Value of the license's expiration time if it should never expire.
*/
@ -102,7 +96,11 @@ public class XPackInfoResponse implements ToXContentObject {
@Override
public String toString() {
return Strings.toString(this, true, false);
return "XPackInfoResponse{" +
"buildInfo=" + buildInfo +
", licenseInfo=" + licenseInfo +
", featureSetsInfo=" + featureSetsInfo +
'}';
}
private static final ConstructingObjectParser<XPackInfoResponse, Void> PARSER = new ConstructingObjectParser<>(
@ -131,41 +129,12 @@ public class XPackInfoResponse implements ToXContentObject {
(p, c, name) -> FeatureSetsInfo.FeatureSet.PARSER.parse(p, name),
new ParseField("features"));
}
public static XPackInfoResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (buildInfo != null) {
builder.field("build", buildInfo, params);
}
EnumSet<XPackInfoRequest.Category> categories = XPackInfoRequest.Category
.toSet(Strings.splitStringByCommaToArray(params.param("categories", "_all")));
if (licenseInfo != null) {
builder.field("license", licenseInfo, params);
} else if (categories.contains(XPackInfoRequest.Category.LICENSE)) {
// if the user requested the license info, and there is no license, we should send
// back an explicit null value (indicating there is no license). This is different
// than not adding the license info at all
builder.nullField("license");
}
if (featureSetsInfo != null) {
builder.field("features", featureSetsInfo, params);
}
if (params.paramAsBoolean("human", true)) {
builder.field("tagline", "You know, for X");
}
return builder.endObject();
}
public static class LicenseInfo implements ToXContentObject {
public static class LicenseInfo {
private final String uid;
private final String type;
private final String mode;
@ -217,6 +186,17 @@ public class XPackInfoResponse implements ToXContentObject {
return Objects.hash(uid, type, mode, status, expiryDate);
}
@Override
public String toString() {
return "LicenseInfo{" +
"uid='" + uid + '\'' +
", type='" + type + '\'' +
", mode='" + mode + '\'' +
", status=" + status +
", expiryDate=" + expiryDate +
'}';
}
private static final ConstructingObjectParser<LicenseInfo, Void> PARSER = new ConstructingObjectParser<>(
"license_info", true, (a, v) -> {
String uid = (String) a[0];
@ -234,22 +214,9 @@ public class XPackInfoResponse implements ToXContentObject {
PARSER.declareString(constructorArg(), new ParseField("status"));
PARSER.declareLong(optionalConstructorArg(), new ParseField("expiry_date_in_millis"));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject()
.field("uid", uid)
.field("type", type)
.field("mode", mode)
.field("status", status.label());
if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) {
builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate);
}
return builder.endObject();
}
}
public static class BuildInfo implements ToXContentObject {
public static class BuildInfo {
private final String hash;
private final String timestamp;
@ -280,23 +247,23 @@ public class XPackInfoResponse implements ToXContentObject {
return Objects.hash(hash, timestamp);
}
@Override
public String toString() {
return "BuildInfo{" +
"hash='" + hash + '\'' +
", timestamp='" + timestamp + '\'' +
'}';
}
private static final ConstructingObjectParser<BuildInfo, Void> PARSER = new ConstructingObjectParser<>(
"build_info", true, (a, v) -> new BuildInfo((String) a[0], (String) a[1]));
static {
PARSER.declareString(constructorArg(), new ParseField("hash"));
PARSER.declareString(constructorArg(), new ParseField("date"));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject()
.field("hash", hash)
.field("date", timestamp)
.endObject();
}
}
public static class FeatureSetsInfo implements ToXContentObject {
public static class FeatureSetsInfo {
private final Map<String, FeatureSet> featureSets;
public FeatureSetsInfo(Set<FeatureSet> featureSets) {
@ -325,16 +292,13 @@ public class XPackInfoResponse implements ToXContentObject {
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
List<String> names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList());
for (String name : names) {
builder.field(name, featureSets.get(name), params);
}
return builder.endObject();
public String toString() {
return "FeatureSetsInfo{" +
"featureSets=" + featureSets +
'}';
}
public static class FeatureSet implements ToXContentObject {
public static class FeatureSet {
private final String name;
@Nullable private final String description;
private final boolean available;
@ -389,6 +353,17 @@ public class XPackInfoResponse implements ToXContentObject {
return Objects.hash(name, description, available, enabled, nativeCodeInfo);
}
@Override
public String toString() {
return "FeatureSet{" +
"name='" + name + '\'' +
", description='" + description + '\'' +
", available=" + available +
", enabled=" + enabled +
", nativeCodeInfo=" + nativeCodeInfo +
'}';
}
private static final ConstructingObjectParser<FeatureSet, String> PARSER = new ConstructingObjectParser<>(
"feature_set", true, (a, name) -> {
String description = (String) a[0];
@ -404,20 +379,6 @@ public class XPackInfoResponse implements ToXContentObject {
PARSER.declareBoolean(constructorArg(), new ParseField("enabled"));
PARSER.declareObject(optionalConstructorArg(), (p, name) -> p.map(), new ParseField("native_code_info"));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (description != null) {
builder.field("description", description);
}
builder.field("available", available);
builder.field("enabled", enabled);
if (nativeCodeInfo != null) {
builder.field("native_code_info", nativeCodeInfo);
}
return builder.endObject();
}
}
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.client;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.index.IndexRequest;
@ -31,6 +32,8 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
import org.elasticsearch.client.ccr.PutFollowResponse;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
@ -63,7 +66,7 @@ public class CCRIT extends ESRestHighLevelClientTestCase {
assertThat(updateSettingsResponse.isAcknowledged(), is(true));
}
public void testCCR() throws Exception {
public void testIndexFollowing() throws Exception {
CcrClient ccrClient = highLevelClient().ccr();
CreateIndexRequest createIndexRequest = new CreateIndexRequest("leader");
@ -95,6 +98,33 @@ public class CCRIT extends ESRestHighLevelClientTestCase {
PauseFollowRequest pauseFollowRequest = new PauseFollowRequest("follower");
AcknowledgedResponse pauseFollowResponse = execute(pauseFollowRequest, ccrClient::pauseFollow, ccrClient::pauseFollowAsync);
assertThat(pauseFollowResponse.isAcknowledged(), is(true));
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
ResumeFollowRequest resumeFollowRequest = new ResumeFollowRequest("follower");
AcknowledgedResponse resumeFollowResponse = execute(resumeFollowRequest, ccrClient::resumeFollow, ccrClient::resumeFollowAsync);
assertThat(resumeFollowResponse.isAcknowledged(), is(true));
assertBusy(() -> {
SearchRequest followerSearchRequest = new SearchRequest("follower");
SearchResponse followerSearchResponse = highLevelClient().search(followerSearchRequest, RequestOptions.DEFAULT);
assertThat(followerSearchResponse.getHits().getTotalHits(), equalTo(2L));
});
// Need to pause prior to unfollowing it:
pauseFollowRequest = new PauseFollowRequest("follower");
pauseFollowResponse = execute(pauseFollowRequest, ccrClient::pauseFollow, ccrClient::pauseFollowAsync);
assertThat(pauseFollowResponse.isAcknowledged(), is(true));
// Need to close index prior to unfollowing it:
CloseIndexRequest closeIndexRequest = new CloseIndexRequest("follower");
org.elasticsearch.action.support.master.AcknowledgedResponse closeIndexReponse =
highLevelClient().indices().close(closeIndexRequest, RequestOptions.DEFAULT);
assertThat(closeIndexReponse.isAcknowledged(), is(true));
UnfollowRequest unfollowRequest = new UnfollowRequest("follower");
AcknowledgedResponse unfollowResponse = execute(unfollowRequest, ccrClient::unfollow, ccrClient::unfollowAsync);
assertThat(unfollowResponse.isAcknowledged(), is(true));
}
private static Map<String, Object> toMap(Response response) throws IOException {

View File

@ -44,6 +44,8 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.MultiTermVectorsResponse;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsResponse;
import org.elasticsearch.common.Strings;
@ -73,6 +75,7 @@ import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -192,8 +195,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
}
}
public void testSourceExists() throws IOException {
public void testSourceExists() throws IOException {
{
GetRequest getRequest = new GetRequest("index", "type", "id");
assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
@ -215,8 +218,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
}
}
public void testSourceDoesNotExist() throws IOException {
public void testSourceDoesNotExist() throws IOException {
final String noSourceIndex = "no_source";
{
// Prepare
@ -224,8 +227,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
String mapping = "\"_doc\": { \"_source\": {\n" +
" \"enabled\": false\n" +
String mapping = "\"_doc\": { \"_source\": {\n" +
" \"enabled\": false\n" +
" } }";
createIndex(noSourceIndex, settings, mapping);
assertEquals(
@ -240,13 +243,13 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
RequestOptions.DEFAULT
).status()
);
}
}
{
GetRequest getRequest = new GetRequest(noSourceIndex, "_doc", "1");
assertTrue(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
}
}
}
public void testGet() throws IOException {
{
@ -1154,10 +1157,10 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
}
{
// test _termvectors on artificial documents
TermVectorsRequest tvRequest = new TermVectorsRequest(sourceIndex, "_doc");
XContentBuilder docBuilder = XContentFactory.jsonBuilder();
docBuilder.startObject().field("field", "valuex").endObject();
tvRequest.setDoc(docBuilder);
TermVectorsRequest tvRequest = new TermVectorsRequest(sourceIndex, "_doc", docBuilder);
TermVectorsResponse tvResponse = execute(tvRequest, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync);
TermVectorsResponse.TermVector.Token expectedToken = new TermVectorsResponse.TermVector.Token(0, 6, 0, null);
@ -1183,4 +1186,69 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
() -> execute(request, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync));
assertEquals(RestStatus.NOT_FOUND, exception.status());
}
// Not entirely sure if _mtermvectors belongs to CRUD, and in the absence of a better place, will have it here
public void testMultiTermvectors() throws IOException {
final String sourceIndex = "index1";
{
// prepare : index docs
Settings settings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
String mappings = "\"_doc\":{\"properties\":{\"field\":{\"type\":\"text\"}}}";
createIndex(sourceIndex, settings, mappings);
assertEquals(
RestStatus.OK,
highLevelClient().bulk(
new BulkRequest()
.add(new IndexRequest(sourceIndex, "_doc", "1")
.source(Collections.singletonMap("field", "value1"), XContentType.JSON))
.add(new IndexRequest(sourceIndex, "_doc", "2")
.source(Collections.singletonMap("field", "value2"), XContentType.JSON))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT
).status()
);
}
{
// test _mtermvectors where MultiTermVectorsRequest is constructed with ids and a template
String[] expectedIds = {"1", "2"};
TermVectorsRequest tvRequestTemplate = new TermVectorsRequest(sourceIndex, "_doc", "fake_id");
tvRequestTemplate.setFields("field");
MultiTermVectorsRequest mtvRequest = new MultiTermVectorsRequest(expectedIds, tvRequestTemplate);
MultiTermVectorsResponse mtvResponse =
execute(mtvRequest, highLevelClient()::mtermvectors, highLevelClient()::mtermvectorsAsync);
List<String> ids = new ArrayList<>();
for (TermVectorsResponse tvResponse: mtvResponse.getTermVectorsResponses()) {
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
assertTrue(tvResponse.getFound());
ids.add(tvResponse.getId());
}
assertArrayEquals(expectedIds, ids.toArray());
}
{
// test _mtermvectors where MultiTermVectorsRequest constructed with adding each separate request
MultiTermVectorsRequest mtvRequest = new MultiTermVectorsRequest();
TermVectorsRequest tvRequest1 = new TermVectorsRequest(sourceIndex, "_doc", "1");
tvRequest1.setFields("field");
mtvRequest.add(tvRequest1);
XContentBuilder docBuilder = XContentFactory.jsonBuilder();
docBuilder.startObject().field("field", "valuex").endObject();
TermVectorsRequest tvRequest2 = new TermVectorsRequest(sourceIndex, "_doc", docBuilder);
mtvRequest.add(tvRequest2);
MultiTermVectorsResponse mtvResponse =
execute(mtvRequest, highLevelClient()::mtermvectors, highLevelClient()::mtermvectorsAsync);
for (TermVectorsResponse tvResponse: mtvResponse.getTermVectorsResponses()) {
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
assertTrue(tvResponse.getFound());
}
}
}
}

View File

@ -194,7 +194,7 @@ public class IndexLifecycleIT extends ESRestHighLevelClientTestCase {
assertEquals("foo-01", fooResponse.getIndex());
assertEquals("hot", fooResponse.getPhase());
assertEquals("rollover", fooResponse.getAction());
assertEquals("attempt_rollover", fooResponse.getStep());
assertEquals("check-rollover-ready", fooResponse.getStep());
assertEquals(new PhaseExecutionInfo(policy.getName(), new Phase("", hotPhase.getMinimumAge(), hotPhase.getActions()),
1L, expectedPolicyModifiedDate), fooResponse.getPhaseExecutionInfo());
IndexLifecycleExplainResponse bazResponse = indexResponses.get("baz-01");
@ -203,7 +203,7 @@ public class IndexLifecycleIT extends ESRestHighLevelClientTestCase {
assertEquals("baz-01", bazResponse.getIndex());
assertEquals("hot", bazResponse.getPhase());
assertEquals("rollover", bazResponse.getAction());
assertEquals("attempt_rollover", bazResponse.getStep());
assertEquals("check-rollover-ready", bazResponse.getStep());
IndexLifecycleExplainResponse squashResponse = indexResponses.get("squash");
assertNotNull(squashResponse);
assertFalse(squashResponse.managedByILM());
@ -272,8 +272,8 @@ public class IndexLifecycleIT extends ESRestHighLevelClientTestCase {
RetryLifecyclePolicyRequest retryRequest = new RetryLifecyclePolicyRequest("retry");
ElasticsearchStatusException ex = expectThrows(ElasticsearchStatusException.class,
() -> execute(
retryRequest, highLevelClient().indexLifecycle()::retryLifecycleStep,
highLevelClient().indexLifecycle()::retryLifecycleStepAsync
retryRequest, highLevelClient().indexLifecycle()::retryLifecyclePolicy,
highLevelClient().indexLifecycle()::retryLifecyclePolicyAsync
)
);
assertEquals(400, ex.status().getStatus());

View File

@ -24,13 +24,17 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
@ -43,19 +47,25 @@ import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedRequestTests;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.CalendarTests;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfigTests;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
@ -68,6 +78,7 @@ import org.elasticsearch.client.ml.job.config.MlFilterTests;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -78,6 +89,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
@ -362,6 +374,16 @@ public class MLRequestConvertersTests extends ESTestCase {
request.getParameters().get(DeleteForecastRequest.ALLOW_NO_FORECASTS.getPreferredName()));
}
public void testDeleteModelSnapshot() {
String jobId = randomAlphaOfLength(10);
String snapshotId = randomAlphaOfLength(10);
DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, snapshotId);
Request request = MLRequestConverters.deleteModelSnapshot(deleteModelSnapshotRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId, request.getEndpoint());
}
public void testGetBuckets() throws IOException {
String jobId = randomAlphaOfLength(10);
GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId);
@ -409,6 +431,40 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
public void testUpdateModelSnapshot() throws IOException {
String jobId = randomAlphaOfLength(10);
String snapshotId = randomAlphaOfLength(10);
UpdateModelSnapshotRequest updateModelSnapshotRequest = new UpdateModelSnapshotRequest(jobId, snapshotId);
updateModelSnapshotRequest.setDescription("My First Snapshot");
updateModelSnapshotRequest.setRetain(true);
Request request = MLRequestConverters.updateModelSnapshot(updateModelSnapshotRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_update", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
UpdateModelSnapshotRequest parsedRequest = UpdateModelSnapshotRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(updateModelSnapshotRequest));
}
}
public void testRevertModelSnapshot() throws IOException {
String jobId = randomAlphaOfLength(10);
String snapshotId = randomAlphaOfLength(10);
RevertModelSnapshotRequest revertModelSnapshotRequest = new RevertModelSnapshotRequest(jobId, snapshotId);
if (randomBoolean()) {
revertModelSnapshotRequest.setDeleteInterveningResults(randomBoolean());
}
Request request = MLRequestConverters.revertModelSnapshot(revertModelSnapshotRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert",
request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
RevertModelSnapshotRequest parsedRequest = RevertModelSnapshotRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(revertModelSnapshotRequest));
}
}
public void testGetOverallBuckets() throws IOException {
String jobId = randomAlphaOfLength(10);
GetOverallBucketsRequest getOverallBucketsRequest = new GetOverallBucketsRequest(jobId);
@ -506,6 +562,26 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
public void testPutCalendarJob() {
String calendarId = randomAlphaOfLength(10);
String job1 = randomAlphaOfLength(5);
String job2 = randomAlphaOfLength(5);
PutCalendarJobRequest putCalendarJobRequest = new PutCalendarJobRequest(calendarId, job1, job2);
Request request = MLRequestConverters.putCalendarJob(putCalendarJobRequest);
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
}
public void testDeleteCalendarJob() {
String calendarId = randomAlphaOfLength(10);
String job1 = randomAlphaOfLength(5);
String job2 = randomAlphaOfLength(5);
DeleteCalendarJobRequest deleteCalendarJobRequest = new DeleteCalendarJobRequest(calendarId, job1, job2);
Request request = MLRequestConverters.deleteCalendarJob(deleteCalendarJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
}
public void testGetCalendars() throws IOException {
GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest();
String expectedEndpoint = "/_xpack/ml/calendars";
@ -535,6 +611,39 @@ public class MLRequestConvertersTests extends ESTestCase {
assertEquals("/_xpack/ml/calendars/" + deleteCalendarRequest.getCalendarId(), request.getEndpoint());
}
public void testGetCalendarEvents() throws IOException {
String calendarId = randomAlphaOfLength(10);
GetCalendarEventsRequest getCalendarEventsRequest = new GetCalendarEventsRequest(calendarId);
getCalendarEventsRequest.setStart("2018-08-08T00:00:00Z");
getCalendarEventsRequest.setEnd("2018-09-08T00:00:00Z");
getCalendarEventsRequest.setPageParams(new PageParams(100, 300));
getCalendarEventsRequest.setJobId(randomAlphaOfLength(10));
Request request = MLRequestConverters.getCalendarEvents(getCalendarEventsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/calendars/" + calendarId + "/events", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetCalendarEventsRequest parsedRequest = GetCalendarEventsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getCalendarEventsRequest));
}
}
public void testPostCalendarEvent() throws Exception {
String calendarId = randomAlphaOfLength(10);
List<ScheduledEvent> events = Arrays.asList(ScheduledEventTests.testInstance(),
ScheduledEventTests.testInstance(),
ScheduledEventTests.testInstance());
PostCalendarEventRequest postCalendarEventRequest = new PostCalendarEventRequest(calendarId, events);
Request request = MLRequestConverters.postCalendarEvents(postCalendarEventRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals("/_xpack/ml/calendars/" + calendarId + "/events", request.getEndpoint());
XContentBuilder builder = JsonXContent.contentBuilder();
builder = postCalendarEventRequest.toXContent(builder, PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS);
assertEquals(Strings.toString(builder), requestEntityToString(request));
}
public void testPutFilter() throws IOException {
MlFilter filter = MlFilterTests.createRandomBuilder("foo").build();
PutFilterRequest putFilterRequest = new PutFilterRequest(filter);
@ -585,6 +694,17 @@ public class MLRequestConvertersTests extends ESTestCase {
}
}
public void testDeleteFilter() {
MlFilter filter = MlFilterTests.createRandomBuilder("foo").build();
DeleteFilterRequest deleteFilterRequest = new DeleteFilterRequest(filter.getId());
Request request = MLRequestConverters.deleteFilter(deleteFilterRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
assertThat(request.getEndpoint(), equalTo("/_xpack/ml/filters/foo"));
assertNull(request.getEntity());
}
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));

View File

@ -29,15 +29,20 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
@ -50,12 +55,17 @@ import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetModelSnapshotsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostCalendarEventResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -64,6 +74,8 @@ import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
@ -71,8 +83,12 @@ import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotResponse;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.CalendarTests;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedState;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
@ -84,7 +100,9 @@ import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.config.JobState;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
@ -824,6 +842,49 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(putCalendarResponse.getCalendar(), equalTo(calendar));
}
public void testPutCalendarJob() throws IOException {
Calendar calendar = new Calendar("put-calendar-job-id", Collections.singletonList("put-calendar-job-0"), null);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
PutCalendarResponse putCalendarResponse =
machineLearningClient.putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
assertThat(putCalendarResponse.getCalendar().getJobIds(), containsInAnyOrder( "put-calendar-job-0"));
String jobId1 = "put-calendar-job-1";
String jobId2 = "put-calendar-job-2";
PutCalendarJobRequest putCalendarJobRequest = new PutCalendarJobRequest(calendar.getId(), jobId1, jobId2);
putCalendarResponse = execute(putCalendarJobRequest,
machineLearningClient::putCalendarJob,
machineLearningClient::putCalendarJobAsync);
assertThat(putCalendarResponse.getCalendar().getJobIds(), containsInAnyOrder(jobId1, jobId2, "put-calendar-job-0"));
}
public void testDeleteCalendarJob() throws IOException {
Calendar calendar = new Calendar("del-calendar-job-id",
Arrays.asList("del-calendar-job-0", "del-calendar-job-1", "del-calendar-job-2"),
null);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
PutCalendarResponse putCalendarResponse =
machineLearningClient.putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
assertThat(putCalendarResponse.getCalendar().getJobIds(),
containsInAnyOrder("del-calendar-job-0", "del-calendar-job-1", "del-calendar-job-2"));
String jobId1 = "del-calendar-job-0";
String jobId2 = "del-calendar-job-2";
DeleteCalendarJobRequest deleteCalendarJobRequest = new DeleteCalendarJobRequest(calendar.getId(), jobId1, jobId2);
putCalendarResponse = execute(deleteCalendarJobRequest,
machineLearningClient::deleteCalendarJob,
machineLearningClient::deleteCalendarJobAsync);
assertThat(putCalendarResponse.getCalendar().getJobIds(), containsInAnyOrder("del-calendar-job-1"));
}
public void testGetCalendars() throws Exception {
Calendar calendar1 = CalendarTests.testInstance();
Calendar calendar2 = CalendarTests.testInstance();
@ -866,6 +927,65 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(exception.status().getStatus(), equalTo(404));
}
public void testGetCalendarEvent() throws Exception {
Calendar calendar = new Calendar("get-calendar-event-id", Collections.singletonList("get-calendar-event-job"), null);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
machineLearningClient.putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
List<ScheduledEvent> events = new ArrayList<>(3);
for (int i = 0; i < 3; i++) {
events.add(ScheduledEventTests.testInstance(calendar.getId(), null));
}
machineLearningClient.postCalendarEvent(new PostCalendarEventRequest(calendar.getId(), events), RequestOptions.DEFAULT);
{
GetCalendarEventsRequest getCalendarEventsRequest = new GetCalendarEventsRequest(calendar.getId());
GetCalendarEventsResponse getCalendarEventsResponse = execute(getCalendarEventsRequest,
machineLearningClient::getCalendarEvents,
machineLearningClient::getCalendarEventsAsync);
assertThat(getCalendarEventsResponse.events().size(), equalTo(3));
assertThat(getCalendarEventsResponse.count(), equalTo(3L));
}
{
GetCalendarEventsRequest getCalendarEventsRequest = new GetCalendarEventsRequest(calendar.getId());
getCalendarEventsRequest.setPageParams(new PageParams(1, 2));
GetCalendarEventsResponse getCalendarEventsResponse = execute(getCalendarEventsRequest,
machineLearningClient::getCalendarEvents,
machineLearningClient::getCalendarEventsAsync);
assertThat(getCalendarEventsResponse.events().size(), equalTo(2));
assertThat(getCalendarEventsResponse.count(), equalTo(3L));
}
{
machineLearningClient.putJob(new PutJobRequest(buildJob("get-calendar-event-job")), RequestOptions.DEFAULT);
GetCalendarEventsRequest getCalendarEventsRequest = new GetCalendarEventsRequest("_all");
getCalendarEventsRequest.setJobId("get-calendar-event-job");
GetCalendarEventsResponse getCalendarEventsResponse = execute(getCalendarEventsRequest,
machineLearningClient::getCalendarEvents,
machineLearningClient::getCalendarEventsAsync);
assertThat(getCalendarEventsResponse.events().size(), equalTo(3));
assertThat(getCalendarEventsResponse.count(), equalTo(3L));
}
}
public void testPostCalendarEvent() throws Exception {
    // Create a calendar, post three scheduled events to it, and verify the
    // response echoes back exactly the events that were submitted.
    Calendar calendar = CalendarTests.testInstance();
    MachineLearningClient mlClient = highLevelClient().machineLearning();
    mlClient.putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);

    int eventCount = 3;
    List<ScheduledEvent> scheduledEvents = new ArrayList<>(eventCount);
    while (scheduledEvents.size() < eventCount) {
        scheduledEvents.add(ScheduledEventTests.testInstance(calendar.getId(), null));
    }

    PostCalendarEventRequest postRequest = new PostCalendarEventRequest(calendar.getId(), scheduledEvents);
    PostCalendarEventResponse postResponse = execute(postRequest,
        mlClient::postCalendarEvent,
        mlClient::postCalendarEventAsync);
    // Order is not guaranteed by the API, so compare as an unordered collection.
    assertThat(postResponse.getScheduledEvents(), containsInAnyOrder(scheduledEvents.toArray()));
}
public void testPutFilter() throws Exception {
String filterId = "filter-job-test";
MlFilter mlFilter = MlFilter.builder(filterId)
@ -951,6 +1071,32 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(filter.getItems(), contains("newItem1", "newItem2", "olditem2"));
}
// Verifies the delete-filter API: a freshly created filter can be deleted once
// (acknowledged), and a second delete of the same id fails with 404 NOT_FOUND.
public void testDeleteFilter() throws Exception {
String filterId = "delete-filter-job-test";
// Build a filter with a random description and random items; only the id matters here.
MlFilter mlFilter = MlFilter.builder(filterId)
.setDescription(randomAlphaOfLength(10))
.setItems(generateRandomStringArray(10, 10, false, false))
.build();
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
PutFilterResponse putFilterResponse = execute(new PutFilterRequest(mlFilter),
machineLearningClient::putFilter,
machineLearningClient::putFilterAsync);
// Sanity check: the create call round-trips the filter unchanged.
MlFilter createdFilter = putFilterResponse.getResponse();
assertThat(createdFilter, equalTo(mlFilter));
DeleteFilterRequest deleteFilterRequest = new DeleteFilterRequest(filterId);
// First delete must be acknowledged.
AcknowledgedResponse response = execute(deleteFilterRequest, machineLearningClient::deleteFilter,
machineLearningClient::deleteFilterAsync);
assertTrue(response.isAcknowledged());
// Deleting the same filter again must fail, surfacing HTTP 404 from the server.
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
() -> execute(deleteFilterRequest, machineLearningClient::deleteFilter,
machineLearningClient::deleteFilterAsync));
assertThat(exception.status().getStatus(), equalTo(404));
}
public static String randomValidJobId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
@ -996,4 +1142,129 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
highLevelClient().machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
return datafeedId;
}
/**
 * Test fixture: creates a job with the given id and indexes a single model
 * snapshot document for it directly into the ML results index, so that the
 * model-snapshot APIs have something to operate on.
 *
 * @param jobId      id of the job to create; also embedded in the snapshot doc
 * @param snapshotId id of the snapshot document to index
 * @throws IOException if the put-job or index request fails
 */
public void createModelSnapshot(String jobId, String snapshotId) throws IOException {
// Document id follows the convention used by the ML results index.
String documentId = jobId + "_model_snapshot_" + snapshotId;
Job job = MachineLearningIT.buildJob(jobId);
highLevelClient().machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc", documentId);
// Refresh immediately so the snapshot is visible to searches in the same test.
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
// Hand-built JSON mimicking a persisted model snapshot (including nested model_size_stats).
indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
}
/**
 * Test fixture: creates a job with the given id and indexes one model snapshot
 * document per snapshot id. Unlike {@code createModelSnapshot(String, String)},
 * the documents here also embed a quantiles section, which the revert API needs.
 *
 * @param jobId       id of the job to create; also embedded in each snapshot doc
 * @param snapshotIds ids of the snapshot documents to index
 * @throws IOException if the put-job or any index request fails
 */
public void createModelSnapshots(String jobId, List<String> snapshotIds) throws IOException {
Job job = MachineLearningIT.buildJob(jobId);
highLevelClient().machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
for(String snapshotId : snapshotIds) {
// Document id follows the convention used by the ML results index.
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc", documentId);
// Refresh immediately so each snapshot is visible to searches in the same test.
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
// Hand-built JSON mimicking a persisted model snapshot, including quantiles state.
indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false, " +
"\"quantiles\":{\"job_id\":\""+jobId+"\", \"timestamp\":1541587919000, " +
"\"quantile_state\":\"state\"}}", XContentType.JSON);
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
}
}
public void testDeleteModelSnapshot() throws IOException {
    // Index a single model snapshot document, then verify the delete API
    // acknowledges its removal.
    String jobId = "test-delete-model-snapshot";
    String snapshotId = "1541587919";
    createModelSnapshot(jobId, snapshotId);

    DeleteModelSnapshotRequest deleteRequest = new DeleteModelSnapshotRequest(jobId, snapshotId);
    MachineLearningClient mlClient = highLevelClient().machineLearning();
    AcknowledgedResponse deleteResponse = execute(deleteRequest,
        mlClient::deleteModelSnapshot,
        mlClient::deleteModelSnapshotAsync);
    assertTrue(deleteResponse.isAcknowledged());
}
// Verifies the update-model-snapshot API: reads the snapshot's original
// description, updates description and retain flag, and confirms both the
// update response and a subsequent get reflect the change.
public void testUpdateModelSnapshot() throws Exception {
String jobId = "test-update-model-snapshot";
String snapshotId = "1541587919";
createModelSnapshot(jobId, snapshotId);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
GetModelSnapshotsRequest getModelSnapshotsRequest = new GetModelSnapshotsRequest(jobId);
// Baseline: one snapshot exists with the description written by createModelSnapshot().
GetModelSnapshotsResponse getModelSnapshotsResponse1 = execute(getModelSnapshotsRequest, machineLearningClient::getModelSnapshots,
machineLearningClient::getModelSnapshotsAsync);
assertEquals(getModelSnapshotsResponse1.count(), 1L);
assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000",
getModelSnapshotsResponse1.snapshots().get(0).getDescription());
// Update both mutable fields of the snapshot.
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);
request.setDescription("Updated description");
request.setRetain(true);
UpdateModelSnapshotResponse response = execute(request, machineLearningClient::updateModelSnapshot,
machineLearningClient::updateModelSnapshotAsync);
// The update response carries the modified model snapshot back to the caller.
assertTrue(response.getAcknowledged());
assertEquals("Updated description", response.getModel().getDescription());
assertTrue(response.getModel().getRetain());
// Re-fetch to confirm the change was actually persisted server-side.
GetModelSnapshotsResponse getModelSnapshotsResponse2 = execute(getModelSnapshotsRequest, machineLearningClient::getModelSnapshots,
machineLearningClient::getModelSnapshotsAsync);
assertEquals(getModelSnapshotsResponse2.count(), 1L);
assertEquals("Updated description",
getModelSnapshotsResponse2.snapshots().get(0).getDescription());
}
public void testRevertModelSnapshot() throws IOException {
    // Index three snapshots for one job, then revert to each of them in turn
    // and check the response reports the requested snapshot as the new model.
    String jobId = "test-revert-model-snapshot";
    List<String> snapshotIds = new ArrayList<>();
    snapshotIds.add("1541587919");
    snapshotIds.add("1541588919");
    snapshotIds.add("1541589919");
    createModelSnapshots(jobId, snapshotIds);

    MachineLearningClient mlClient = highLevelClient().machineLearning();
    for (String snapshotId : snapshotIds) {
        RevertModelSnapshotRequest revertRequest = new RevertModelSnapshotRequest(jobId, snapshotId);
        // Randomly exercise the optional delete_intervening_results flag.
        if (randomBoolean()) {
            revertRequest.setDeleteInterveningResults(randomBoolean());
        }
        RevertModelSnapshotResponse revertResponse = execute(revertRequest,
            mlClient::revertModelSnapshot,
            mlClient::revertModelSnapshotAsync);
        assertEquals(snapshotId, revertResponse.getModel().getSnapshotId());
    }
}
}

View File

@ -106,15 +106,19 @@ public class ReindexIT extends ESRestHighLevelClientTestCase {
);
}
{
ReindexRequest reindexRequest = new ReindexRequest();
// tag::submit-reindex-task
ReindexRequest reindexRequest = new ReindexRequest(); // <1>
reindexRequest.setSourceIndices(sourceIndex);
reindexRequest.setDestIndex(destinationIndex);
reindexRequest.setSourceQuery(new IdsQueryBuilder().addIds("1").types("type"));
reindexRequest.setRefresh(true);
TaskSubmissionResponse reindexSubmission = highLevelClient().submitReindexTask(reindexRequest, RequestOptions.DEFAULT);
TaskSubmissionResponse reindexSubmission = highLevelClient()
.submitReindexTask(reindexRequest, RequestOptions.DEFAULT); // <2>
BooleanSupplier hasUpgradeCompleted = checkCompletionStatus(reindexSubmission.getTask());
String taskId = reindexSubmission.getTask(); // <3>
// end::submit-reindex-task
BooleanSupplier hasUpgradeCompleted = checkCompletionStatus(taskId);
awaitBusy(hasUpgradeCompleted);
}
}

View File

@ -53,6 +53,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
@ -1303,6 +1304,26 @@ public class RequestConvertersTests extends ESTestCase {
assertToXContentBody(tvRequest, request.getEntity());
}
public void testMultiTermVectors() throws IOException {
    // Assemble a multi-term-vectors request from a random number of individual
    // term-vectors sub-requests, then verify the converted low-level request:
    // GET method, the _mtermvectors endpoint, and a body matching the request.
    MultiTermVectorsRequest mtvRequest = new MultiTermVectorsRequest();
    int requestCount = randomIntBetween(0, 5);
    for (int i = 0; i < requestCount; i++) {
        TermVectorsRequest tvRequest = new TermVectorsRequest(
            randomAlphaOfLengthBetween(3, 10),   // index
            randomAlphaOfLengthBetween(3, 10),   // type
            randomAlphaOfLengthBetween(3, 10));  // id
        tvRequest.setFields(generateRandomStringArray(10, 5, false, false));
        mtvRequest.add(tvRequest);
    }

    Request request = RequestConverters.mtermVectors(mtvRequest);
    assertEquals(HttpGet.METHOD_NAME, request.getMethod());
    assertEquals("_mtermvectors", request.getEndpoint());
    assertToXContentBody(mtvRequest, request.getEntity());
}
public void testFieldCaps() {
// Create a random request.
String[] indices = randomIndicesNames(0, 5);

View File

@ -674,7 +674,6 @@ public class RestHighLevelClientTests extends ESTestCase {
"indices.exists_type",
"indices.get_upgrade",
"indices.put_alias",
"mtermvectors",
"render_search_template",
"scripts_painless_execute"
};

View File

@ -29,8 +29,8 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
@ -40,11 +40,10 @@ import org.elasticsearch.client.rollup.GetRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.RollableIndexCaps;
import org.elasticsearch.client.rollup.RollupJobCaps;
import org.elasticsearch.client.rollup.StartRollupJobRequest;
import org.elasticsearch.client.rollup.StartRollupJobResponse;
import org.elasticsearch.client.rollup.RollableIndexCaps;
import org.elasticsearch.client.rollup.RollupJobCaps;
import org.elasticsearch.client.rollup.StopRollupJobRequest;
import org.elasticsearch.client.rollup.StopRollupJobResponse;
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
@ -158,7 +157,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
final RollupClient rollupClient = highLevelClient().rollup();
execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
DeleteRollupJobRequest deleteRollupJobRequest = new DeleteRollupJobRequest(id);
DeleteRollupJobResponse deleteRollupJobResponse = highLevelClient().rollup()
AcknowledgedResponse deleteRollupJobResponse = highLevelClient().rollup()
.deleteRollupJob(deleteRollupJobRequest, RequestOptions.DEFAULT);
assertTrue(deleteRollupJobResponse.isAcknowledged());
}
@ -180,7 +179,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
final RollupClient rollupClient = highLevelClient().rollup();
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
assertTrue(response.isAcknowledged());
StartRollupJobRequest startRequest = new StartRollupJobRequest(id);
@ -235,8 +234,14 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
// stop the job
StopRollupJobRequest stopRequest = new StopRollupJobRequest(id);
stopRequest.waitForCompletion(randomBoolean());
StopRollupJobResponse stopResponse = execute(stopRequest, rollupClient::stopRollupJob, rollupClient::stopRollupJobAsync);
assertTrue(stopResponse.isAcknowledged());
if (stopRequest.waitForCompletion()) {
getResponse = execute(new GetRollupJobRequest(id), rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
assertThat(getResponse.getJobs(), hasSize(1));
assertThat(getResponse.getJobs().get(0).getStatus().getState(), equalTo(IndexerState.STOPPED));
}
}
public void testGetMissingRollupJob() throws Exception {
@ -307,7 +312,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
final RollupClient rollupClient = highLevelClient().rollup();
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
assertTrue(response.isAcknowledged());
// wait for the PutJob api to create the index w/ metadata
@ -419,7 +424,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
new PutRollupJobRequest(new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout));
final RollupClient rollupClient = highLevelClient().rollup();
PutRollupJobResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
AcknowledgedResponse response = execute(putRollupJobRequest, rollupClient::putRollupJob, rollupClient::putRollupJobAsync);
assertTrue(response.isAcknowledged());
// wait for the PutJob api to create the index w/ metadata

View File

@ -28,6 +28,7 @@ import org.elasticsearch.client.rollup.StartRollupJobRequest;
import org.elasticsearch.client.rollup.StopRollupJobRequest;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -64,13 +65,27 @@ public class RollupRequestConvertersTests extends ESTestCase {
public void testStopJob() throws IOException {
String jobId = randomAlphaOfLength(5);
StopRollupJobRequest stopJob = new StopRollupJobRequest(jobId);
String expectedTimeOutString = null;
String expectedWaitForCompletion = null;
int expectedParameters = 0;
if (randomBoolean()) {
stopJob.timeout(TimeValue.parseTimeValue(randomPositiveTimeValue(), "timeout"));
expectedTimeOutString = stopJob.timeout().getStringRep();
expectedParameters++;
}
if (randomBoolean()) {
stopJob.waitForCompletion(randomBoolean());
expectedWaitForCompletion = stopJob.waitForCompletion().toString();
expectedParameters++;
}
Request request = RollupRequestConverters.stopJob(stopJob);
assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + jobId + "/_stop"));
assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
assertThat(request.getParameters().keySet(), empty());
assertThat(request.getParameters().keySet().size(), equalTo(expectedParameters));
assertThat(request.getParameters().get("timeout"), equalTo(expectedTimeOutString));
assertThat(request.getParameters().get("wait_for_completion"), equalTo(expectedWaitForCompletion));
assertNull(request.getEntity());
}

View File

@ -144,6 +144,14 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
CreateSnapshotResponse response = createTestSnapshot(request);
assertEquals(waitForCompletion ? RestStatus.OK : RestStatus.ACCEPTED, response.status());
if (waitForCompletion == false) {
// If we don't wait for the snapshot to complete we have to cancel it to not leak the snapshot task
AcknowledgedResponse deleteResponse = execute(
new DeleteSnapshotRequest(repository, snapshot),
highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync
);
assertTrue(deleteResponse.isAcknowledged());
}
}
public void testGetSnapshots() throws IOException {

View File

@ -0,0 +1,116 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ccr;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for {@link ResumeFollowRequest}: a randomized
 * instance is serialized and re-parsed via the static {@code PARSER}, which
 * mirrors the request's follower-index constructor argument plus every
 * optional follow-config setting.
 */
public class ResumeFollowRequestTests extends AbstractXContentTestCase<ResumeFollowRequest> {
// Lenient test-only parser (ignoreUnknownFields = true) used by doParseInstance().
private static final ConstructingObjectParser<ResumeFollowRequest, Void> PARSER = new ConstructingObjectParser<>("test_parser",
true, (args) -> new ResumeFollowRequest((String) args[0]));
static {
// The follower index is the single constructor argument; everything else is an optional setter.
PARSER.declareString(ConstructingObjectParser.constructorArg(), PutFollowRequest.FOLLOWER_INDEX_FIELD);
PARSER.declareInt(ResumeFollowRequest::setMaxReadRequestOperationCount, FollowConfig.MAX_READ_REQUEST_OPERATION_COUNT);
// Byte-size fields are serialized as strings and parsed back with parseBytesSizeValue.
PARSER.declareField(
ResumeFollowRequest::setMaxReadRequestSize,
(p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), FollowConfig.MAX_READ_REQUEST_SIZE.getPreferredName()),
PutFollowRequest.MAX_READ_REQUEST_SIZE,
ObjectParser.ValueType.STRING);
PARSER.declareInt(ResumeFollowRequest::setMaxOutstandingReadRequests, FollowConfig.MAX_OUTSTANDING_READ_REQUESTS);
PARSER.declareInt(ResumeFollowRequest::setMaxWriteRequestOperationCount, FollowConfig.MAX_WRITE_REQUEST_OPERATION_COUNT);
PARSER.declareField(
ResumeFollowRequest::setMaxWriteRequestSize,
(p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), FollowConfig.MAX_WRITE_REQUEST_SIZE.getPreferredName()),
PutFollowRequest.MAX_WRITE_REQUEST_SIZE,
ObjectParser.ValueType.STRING);
PARSER.declareInt(ResumeFollowRequest::setMaxOutstandingWriteRequests, FollowConfig.MAX_OUTSTANDING_WRITE_REQUESTS);
PARSER.declareInt(ResumeFollowRequest::setMaxWriteBufferCount, FollowConfig.MAX_WRITE_BUFFER_COUNT);
PARSER.declareField(
ResumeFollowRequest::setMaxWriteBufferSize,
(p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), FollowConfig.MAX_WRITE_BUFFER_SIZE.getPreferredName()),
PutFollowRequest.MAX_WRITE_BUFFER_SIZE,
ObjectParser.ValueType.STRING);
// Time-value fields are likewise serialized as strings and parsed with parseTimeValue.
PARSER.declareField(
ResumeFollowRequest::setMaxRetryDelay,
(p, c) -> TimeValue.parseTimeValue(p.text(), FollowConfig.MAX_RETRY_DELAY_FIELD.getPreferredName()),
PutFollowRequest.MAX_RETRY_DELAY_FIELD,
ObjectParser.ValueType.STRING);
PARSER.declareField(
ResumeFollowRequest::setReadPollTimeout,
(p, c) -> TimeValue.parseTimeValue(p.text(), FollowConfig.READ_POLL_TIMEOUT.getPreferredName()),
PutFollowRequest.READ_POLL_TIMEOUT,
ObjectParser.ValueType.STRING);
}
@Override
protected ResumeFollowRequest doParseInstance(XContentParser parser) throws IOException {
return PARSER.apply(parser, null);
}
@Override
protected boolean supportsUnknownFields() {
// The parser is lenient, so random unknown fields must not break parsing.
return true;
}
@Override
protected ResumeFollowRequest createTestInstance() {
// Each optional setting is independently present or absent to cover
// partial-request serialization.
ResumeFollowRequest resumeFollowRequest = new ResumeFollowRequest(randomAlphaOfLength(4));
if (randomBoolean()) {
resumeFollowRequest.setMaxOutstandingReadRequests(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxOutstandingWriteRequests(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxReadRequestOperationCount(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxReadRequestSize(new ByteSizeValue(randomNonNegativeLong()));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxWriteBufferCount(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxWriteBufferSize(new ByteSizeValue(randomNonNegativeLong()));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxWriteRequestOperationCount(randomIntBetween(0, Integer.MAX_VALUE));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxWriteRequestSize(new ByteSizeValue(randomNonNegativeLong()));
}
if (randomBoolean()) {
resumeFollowRequest.setMaxRetryDelay(new TimeValue(randomNonNegativeLong()));
}
if (randomBoolean()) {
resumeFollowRequest.setReadPollTimeout(new TimeValue(randomNonNegativeLong()));
}
return resumeFollowRequest;
}
}

View File

@ -18,26 +18,33 @@
*/
package org.elasticsearch.client.core;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class AcknowledgedResponseTests extends AbstractXContentTestCase<AcknowledgedResponse> {
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
@Override
protected AcknowledgedResponse createTestInstance() {
public class AcknowledgedResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
this::createTestInstance,
AcknowledgedResponseTests::toXContent,
AcknowledgedResponse::fromXContent)
.supportsUnknownFields(false)
.test();
}
private AcknowledgedResponse createTestInstance() {
return new AcknowledgedResponse(randomBoolean());
}
@Override
protected AcknowledgedResponse doParseInstance(XContentParser parser) throws IOException {
return AcknowledgedResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
public static void toXContent(AcknowledgedResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
{
builder.field(response.getFieldName(), response.isAcknowledged());
}
builder.endObject();
}
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.core;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
/**
 * XContent parsing tests for {@link MultiTermVectorsResponse}: a randomized
 * response is rendered to XContent and parsed back via fromXContent.
 * Serialization of individual responses is delegated to TermVectorsResponseTests.
 */
public class MultiTermVectorsResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(
this::createParser,
this::createTestInstance,
this::toXContent,
MultiTermVectorsResponse::fromXContent)
.supportsUnknownFields(true)
// Exclude term-vector internals from random-field insertion: their
// contents are free-form and would collide with inserted fields.
.randomFieldsExcludeFilter(field ->
field.endsWith("term_vectors") || field.endsWith("terms") || field.endsWith("tokens"))
.test();
}
// Renders the response as {"docs": [...]}, one entry per term-vectors response.
private void toXContent(MultiTermVectorsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
List<TermVectorsResponse> termVectorsResponseList = response.getTermVectorsResponses();
if (termVectorsResponseList != null) {
builder.startArray("docs");
for (TermVectorsResponse tvr : termVectorsResponseList) {
TermVectorsResponseTests.toXContent(tvr, builder);
}
builder.endArray();
}
builder.endObject();
}
// Builds a response wrapping 0-5 randomized term-vectors responses.
protected MultiTermVectorsResponse createTestInstance() {
int numberOfResponses = randomIntBetween(0, 5);
List<TermVectorsResponse> responses = new ArrayList<>(numberOfResponses);
for (int i = 0; i < numberOfResponses; i++) {
TermVectorsResponse tvResponse = TermVectorsResponseTests.createTestInstance();
responses.add(tvResponse);
}
return new MultiTermVectorsResponse(responses);
}
}

View File

@ -35,8 +35,8 @@ public class TermVectorsResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
xContentTester(
this::createParser,
this::createTestInstance,
this::toXContent,
TermVectorsResponseTests::createTestInstance,
TermVectorsResponseTests::toXContent,
TermVectorsResponse::fromXContent)
.supportsUnknownFields(true)
.randomFieldsExcludeFilter(field ->
@ -44,7 +44,7 @@ public class TermVectorsResponseTests extends ESTestCase {
.test();
}
private void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException {
static void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field("_index", response.getIndex());
builder.field("_type", response.getType());
@ -66,7 +66,7 @@ public class TermVectorsResponseTests extends ESTestCase {
builder.endObject();
}
private void toXContent(TermVectorsResponse.TermVector tv, XContentBuilder builder) throws IOException {
private static void toXContent(TermVectorsResponse.TermVector tv, XContentBuilder builder) throws IOException {
builder.startObject(tv.getFieldName());
// build fields_statistics
if (tv.getFieldStatistics() != null) {
@ -117,7 +117,7 @@ public class TermVectorsResponseTests extends ESTestCase {
}
protected TermVectorsResponse createTestInstance() {
static TermVectorsResponse createTestInstance() {
String index = randomAlphaOfLength(5);
String type = randomAlphaOfLength(5);
String id = String.valueOf(randomIntBetween(1,100));
@ -148,7 +148,7 @@ public class TermVectorsResponseTests extends ESTestCase {
private TermVectorsResponse.TermVector randomTermVector(String fieldName, boolean hasFieldStatistics, boolean hasTermStatistics,
private static TermVectorsResponse.TermVector randomTermVector(String fieldName, boolean hasFieldStatistics, boolean hasTermStatistics,
boolean hasScores, boolean hasOffsets, boolean hasPositions, boolean hasPayloads) {
TermVectorsResponse.TermVector.FieldStatistics fs = null;
if (hasFieldStatistics) {
@ -171,7 +171,7 @@ public class TermVectorsResponseTests extends ESTestCase {
return tv;
}
private TermVectorsResponse.TermVector.Term randomTerm(String termTxt, boolean hasTermStatistics, boolean hasScores,
private static TermVectorsResponse.TermVector.Term randomTerm(String termTxt, boolean hasTermStatistics, boolean hasScores,
boolean hasOffsets, boolean hasPositions, boolean hasPayloads) {
int termFreq = randomInt(10000);

View File

@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@ -35,6 +36,8 @@ import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ccr.PauseFollowRequest;
import org.elasticsearch.client.ccr.PutFollowRequest;
import org.elasticsearch.client.ccr.PutFollowResponse;
import org.elasticsearch.client.ccr.ResumeFollowRequest;
import org.elasticsearch.client.ccr.UnfollowRequest;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -46,7 +49,6 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class CCRDocumentationIT extends ESRestHighLevelClientTestCase {
@ -198,11 +200,9 @@ public class CCRDocumentationIT extends ESRestHighLevelClientTestCase {
// Resume follow index, so that it can be paused again:
{
// TODO: Replace this with high level rest client code when resume follow API is available:
final Request req = new Request("POST", "/" + followIndex + "/_ccr/resume_follow");
req.setJsonEntity("{}");
Response res = client().performRequest(req);
assertThat(res.getStatusLine().getStatusCode(), equalTo(200));
ResumeFollowRequest resumeFollowRequest = new ResumeFollowRequest(followIndex);
AcknowledgedResponse resumeResponse = client.ccr().resumeFollow(resumeFollowRequest, RequestOptions.DEFAULT);
assertThat(resumeResponse.isAcknowledged(), is(true));
}
// Replace the empty listener by a blocking listener in test
@ -217,6 +217,164 @@ public class CCRDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
    /**
     * Documentation test for the CCR resume-follow API.
     *
     * <p>Sets up a leader index, follows it, pauses the follower, and then
     * demonstrates resuming the follower both synchronously and
     * asynchronously. The {@code tag::}/{@code end::} comment pairs mark
     * snippets that are extracted verbatim into the reference documentation,
     * so the code between them must not be reformatted.
     */
    public void testResumeFollow() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // Create leader index:
            CreateIndexRequest createIndexRequest = new CreateIndexRequest("leader");
            // soft deletes are enabled explicitly; CCR follows the leader's operation history
            createIndexRequest.settings(Collections.singletonMap("index.soft_deletes.enabled", true));
            CreateIndexResponse response = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
            assertThat(response.isAcknowledged(), is(true));
        }
        String followIndex = "follower";
        // Follow index, so that it can be paused:
        {
            PutFollowRequest putFollowRequest = new PutFollowRequest("local", "leader", followIndex);
            PutFollowResponse putFollowResponse = client.ccr().putFollow(putFollowRequest, RequestOptions.DEFAULT);
            assertThat(putFollowResponse.isFollowIndexCreated(), is(true));
            assertThat(putFollowResponse.isFollowIndexShardsAcked(), is(true));
            assertThat(putFollowResponse.isIndexFollowingStarted(), is(true));
        }
        // Pause follow index, so that it can be resumed:
        {
            PauseFollowRequest pauseFollowRequest = new PauseFollowRequest(followIndex);
            AcknowledgedResponse pauseResponse = client.ccr().pauseFollow(pauseFollowRequest, RequestOptions.DEFAULT);
            assertThat(pauseResponse.isAcknowledged(), is(true));
        }
        // tag::ccr-resume-follow-request
        ResumeFollowRequest request = new ResumeFollowRequest(followIndex); // <1>
        // end::ccr-resume-follow-request
        // tag::ccr-resume-follow-execute
        AcknowledgedResponse response =
            client.ccr().resumeFollow(request, RequestOptions.DEFAULT);
        // end::ccr-resume-follow-execute
        // tag::ccr-resume-follow-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ccr-resume-follow-response
        // Pause follow index, so that it can be resumed again:
        {
            PauseFollowRequest pauseFollowRequest = new PauseFollowRequest(followIndex);
            AcknowledgedResponse pauseResponse = client.ccr().pauseFollow(pauseFollowRequest, RequestOptions.DEFAULT);
            assertThat(pauseResponse.isAcknowledged(), is(true));
        }
        // tag::ccr-resume-follow-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ccr-resume-follow-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ccr-resume-follow-execute-async
        client.ccr()
            .resumeFollowAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::ccr-resume-follow-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation test for the CCR unfollow API.
     *
     * <p>Creates a leader index, follows it, then pauses and closes the
     * follower (the precondition for unfollow, per the inline setup comments),
     * and demonstrates the unfollow call both synchronously and
     * asynchronously. Code between {@code tag::}/{@code end::} markers is
     * extracted verbatim into the reference documentation.
     */
    public void testUnfollow() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // Create leader index:
            CreateIndexRequest createIndexRequest = new CreateIndexRequest("leader");
            // soft deletes are enabled explicitly; CCR follows the leader's operation history
            createIndexRequest.settings(Collections.singletonMap("index.soft_deletes.enabled", true));
            CreateIndexResponse response = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
            assertThat(response.isAcknowledged(), is(true));
        }
        String followIndex = "follower";
        // Follow index, pause and close, so that it can be unfollowed:
        {
            PutFollowRequest putFollowRequest = new PutFollowRequest("local", "leader", followIndex);
            PutFollowResponse putFollowResponse = client.ccr().putFollow(putFollowRequest, RequestOptions.DEFAULT);
            assertThat(putFollowResponse.isFollowIndexCreated(), is(true));
            assertThat(putFollowResponse.isFollowIndexShardsAcked(), is(true));
            assertThat(putFollowResponse.isIndexFollowingStarted(), is(true));
            PauseFollowRequest pauseFollowRequest = new PauseFollowRequest(followIndex);
            AcknowledgedResponse unfollowResponse = client.ccr().pauseFollow(pauseFollowRequest, RequestOptions.DEFAULT);
            assertThat(unfollowResponse.isAcknowledged(), is(true));
            CloseIndexRequest closeIndexRequest = new CloseIndexRequest(followIndex);
            assertThat(client.indices().close(closeIndexRequest, RequestOptions.DEFAULT).isAcknowledged(), is(true));
        }
        // tag::ccr-unfollow-request
        UnfollowRequest request = new UnfollowRequest(followIndex); // <1>
        // end::ccr-unfollow-request
        // tag::ccr-unfollow-execute
        AcknowledgedResponse response =
            client.ccr().unfollow(request, RequestOptions.DEFAULT);
        // end::ccr-unfollow-execute
        // tag::ccr-unfollow-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ccr-unfollow-response
        // Delete, put follow index, pause and close, so that it can be unfollowed again:
        {
            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(followIndex);
            assertThat(client.indices().delete(deleteIndexRequest, RequestOptions.DEFAULT).isAcknowledged(), is(true));
            PutFollowRequest putFollowRequest = new PutFollowRequest("local", "leader", followIndex);
            PutFollowResponse putFollowResponse = client.ccr().putFollow(putFollowRequest, RequestOptions.DEFAULT);
            assertThat(putFollowResponse.isFollowIndexCreated(), is(true));
            assertThat(putFollowResponse.isFollowIndexShardsAcked(), is(true));
            assertThat(putFollowResponse.isIndexFollowingStarted(), is(true));
            PauseFollowRequest pauseFollowRequest = new PauseFollowRequest(followIndex);
            AcknowledgedResponse unfollowResponse = client.ccr().pauseFollow(pauseFollowRequest, RequestOptions.DEFAULT);
            assertThat(unfollowResponse.isAcknowledged(), is(true));
            CloseIndexRequest closeIndexRequest = new CloseIndexRequest(followIndex);
            assertThat(client.indices().close(closeIndexRequest, RequestOptions.DEFAULT).isAcknowledged(), is(true));
        }
        // tag::ccr-unfollow-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ccr-unfollow-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ccr-unfollow-execute-async
        client.ccr()
            .unfollowAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::ccr-unfollow-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
static Map<String, Object> toMap(Response response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
}

View File

@ -54,6 +54,8 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.RethrottleRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.MultiTermVectorsResponse;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsResponse;
import org.elasticsearch.common.Strings;
@ -1565,10 +1567,12 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
{
// tag::term-vectors-request-artificial
TermVectorsRequest request = new TermVectorsRequest("authors", "_doc");
XContentBuilder docBuilder = XContentFactory.jsonBuilder();
docBuilder.startObject().field("user", "guest-user").endObject();
request.setDoc(docBuilder); // <1>
TermVectorsRequest request = new TermVectorsRequest("authors",
"_doc",
docBuilder); // <1>
// end::term-vectors-request-artificial
// tag::term-vectors-request-optional-arguments
@ -1669,6 +1673,80 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
// Not entirely sure if _mtermvectors belongs to CRUD, and in the absence of a better place, will have it here
public void testMultiTermVectors() throws Exception {
RestHighLevelClient client = highLevelClient();
CreateIndexRequest authorsRequest = new CreateIndexRequest("authors").mapping("_doc", "user", "type=text");
CreateIndexResponse authorsResponse = client.indices().create(authorsRequest, RequestOptions.DEFAULT);
assertTrue(authorsResponse.isAcknowledged());
client.index(new IndexRequest("index", "_doc", "1").source("user", "kimchy"), RequestOptions.DEFAULT);
client.index(new IndexRequest("index", "_doc", "2").source("user", "s1monw"), RequestOptions.DEFAULT);
Response refreshResponse = client().performRequest(new Request("POST", "/authors/_refresh"));
assertEquals(200, refreshResponse.getStatusLine().getStatusCode());
{
// tag::multi-term-vectors-request
MultiTermVectorsRequest request = new MultiTermVectorsRequest(); // <1>
TermVectorsRequest tvrequest1 =
new TermVectorsRequest("authors", "_doc", "1");
tvrequest1.setFields("user");
request.add(tvrequest1); // <2>
XContentBuilder docBuilder = XContentFactory.jsonBuilder();
docBuilder.startObject().field("user", "guest-user").endObject();
TermVectorsRequest tvrequest2 =
new TermVectorsRequest("authors", "_doc", docBuilder);
request.add(tvrequest2); // <3>
// end::multi-term-vectors-request
}
// tag::multi-term-vectors-request-template
TermVectorsRequest tvrequestTemplate =
new TermVectorsRequest("authors", "_doc", "fake_id"); // <1>
tvrequestTemplate.setFields("user");
String[] ids = {"1", "2"};
MultiTermVectorsRequest request =
new MultiTermVectorsRequest(ids, tvrequestTemplate); // <2>
// end::multi-term-vectors-request-template
// tag::multi-term-vectors-execute
MultiTermVectorsResponse response =
client.mtermvectors(request, RequestOptions.DEFAULT);
// end::multi-term-vectors-execute
// tag::multi-term-vectors-response
List<TermVectorsResponse> tvresponseList =
response.getTermVectorsResponses(); // <1>
if (tvresponseList != null) {
for (TermVectorsResponse tvresponse : tvresponseList) {
}
}
// end::multi-term-vectors-response
ActionListener<MultiTermVectorsResponse> listener;
// tag::multi-term-vectors-execute-listener
listener = new ActionListener<MultiTermVectorsResponse>() {
@Override
public void onResponse(MultiTermVectorsResponse mtvResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::multi-term-vectors-execute-listener
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::multi-term-vectors-execute-async
client.mtermvectorsAsync(
request, RequestOptions.DEFAULT, listener); // <1>
// end::multi-term-vectors-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testMultiGet() throws Exception {
RestHighLevelClient client = highLevelClient();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.client.documentation;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
@ -29,16 +30,31 @@ import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.indexlifecycle.DeleteAction;
import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.ExplainLifecycleRequest;
import org.elasticsearch.client.indexlifecycle.GetLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.GetLifecyclePolicyResponse;
import org.elasticsearch.client.indexlifecycle.LifecycleAction;
import org.elasticsearch.client.indexlifecycle.LifecycleManagementStatusRequest;
import org.elasticsearch.client.indexlifecycle.LifecycleManagementStatusResponse;
import org.elasticsearch.client.indexlifecycle.LifecyclePolicy;
import org.elasticsearch.client.indexlifecycle.OperationMode;
import org.elasticsearch.client.indexlifecycle.LifecyclePolicyMetadata;
import org.elasticsearch.client.indexlifecycle.Phase;
import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.RetryLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.RolloverAction;
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.indexlifecycle.ShrinkAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collections;
@ -47,6 +63,8 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
public void testPutLifecyclePolicy() throws Exception {
@ -59,14 +77,14 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
new ByteSizeValue(50, ByteSizeUnit.GB), null, null));
phases.put("hot", new Phase("hot", TimeValue.ZERO, hotActions)); // <1>
Map<String, LifecycleAction> deleteActions =
Map<String, LifecycleAction> deleteActions =
Collections.singletonMap(DeleteAction.NAME, new DeleteAction());
phases.put("delete", new Phase("delete",
phases.put("delete", new Phase("delete",
new TimeValue(90, TimeUnit.DAYS), deleteActions)); // <2>
LifecyclePolicy policy = new LifecyclePolicy("my_policy",
phases); // <3>
PutLifecyclePolicyRequest request =
PutLifecyclePolicyRequest request =
new PutLifecyclePolicyRequest(policy);
// end::ilm-put-lifecycle-policy-request
@ -83,10 +101,10 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
// Delete the policy so it can be added again
{
DeleteLifecyclePolicyRequest deleteRequest =
DeleteLifecyclePolicyRequest deleteRequest =
new DeleteLifecyclePolicyRequest("my_policy");
AcknowledgedResponse deleteResponse = client.indexLifecycle()
.deleteLifecyclePolicy(deleteRequest,
.deleteLifecyclePolicy(deleteRequest,
RequestOptions.DEFAULT);
assertTrue(deleteResponse.isAcknowledged());
}
@ -111,7 +129,7 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
listener = new LatchedActionListener<>(listener, latch);
// tag::ilm-put-lifecycle-policy-execute-async
client.indexLifecycle().putLifecyclePolicyAsync(request,
client.indexLifecycle().putLifecyclePolicyAsync(request,
RequestOptions.DEFAULT, listener); // <1>
// end::ilm-put-lifecycle-policy-execute-async
@ -119,6 +137,399 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
}
    /**
     * Documentation test for the ILM delete-lifecycle-policy API.
     *
     * <p>Creates a policy so there is something to delete, deletes it
     * synchronously, re-creates it, and deletes it again asynchronously.
     * Code between {@code tag::}/{@code end::} markers is extracted verbatim
     * into the reference documentation.
     */
    public void testDeletePolicy() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Set up a policy so we have something to delete
        PutLifecyclePolicyRequest putRequest;
        {
            Map<String, Phase> phases = new HashMap<>();
            Map<String, LifecycleAction> hotActions = new HashMap<>();
            hotActions.put(RolloverAction.NAME, new RolloverAction(
                new ByteSizeValue(50, ByteSizeUnit.GB), null, null));
            phases.put("hot", new Phase("hot", TimeValue.ZERO, hotActions));
            Map<String, LifecycleAction> deleteActions =
                Collections.singletonMap(DeleteAction.NAME,
                    new DeleteAction());
            phases.put("delete",
                new Phase("delete",
                    new TimeValue(90, TimeUnit.DAYS), deleteActions));
            LifecyclePolicy myPolicy = new LifecyclePolicy("my_policy", phases);
            putRequest = new PutLifecyclePolicyRequest(myPolicy);
            AcknowledgedResponse putResponse = client.indexLifecycle().
                putLifecyclePolicy(putRequest, RequestOptions.DEFAULT);
            assertTrue(putResponse.isAcknowledged());
        }
        // tag::ilm-delete-lifecycle-policy-request
        DeleteLifecyclePolicyRequest request =
            new DeleteLifecyclePolicyRequest("my_policy"); // <1>
        // end::ilm-delete-lifecycle-policy-request
        // tag::ilm-delete-lifecycle-policy-execute
        AcknowledgedResponse response = client.indexLifecycle()
            .deleteLifecyclePolicy(request, RequestOptions.DEFAULT);
        // end::ilm-delete-lifecycle-policy-execute
        // tag::ilm-delete-lifecycle-policy-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ilm-delete-lifecycle-policy-response
        assertTrue(acknowledged);
        // Put the policy again so we can delete it again
        {
            AcknowledgedResponse putResponse = client.indexLifecycle().
                putLifecyclePolicy(putRequest, RequestOptions.DEFAULT);
            assertTrue(putResponse.isAcknowledged());
        }
        // tag::ilm-delete-lifecycle-policy-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-delete-lifecycle-policy-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-delete-lifecycle-policy-execute-async
        client.indexLifecycle().deleteLifecyclePolicyAsync(request,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-delete-lifecycle-policy-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation test for the ILM get-lifecycle-policy API.
     *
     * <p>Puts two policies ({@code my_policy} and {@code other_policy}), then
     * demonstrates fetching all policies, fetching by name, reading the
     * returned {@code LifecyclePolicyMetadata}, and the async variant. Code
     * between {@code tag::}/{@code end::} markers is extracted verbatim into
     * the reference documentation.
     */
    public void testGetLifecyclePolicy() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Keep handles to the policies as put, so the responses can be compared against them
        LifecyclePolicy myPolicyAsPut;
        LifecyclePolicy otherPolicyAsPut;
        // Set up some policies so we have something to get
        {
            Map<String, Phase> phases = new HashMap<>();
            Map<String, LifecycleAction> hotActions = new HashMap<>();
            hotActions.put(RolloverAction.NAME, new RolloverAction(
                new ByteSizeValue(50, ByteSizeUnit.GB), null, null));
            phases.put("hot", new Phase("hot", TimeValue.ZERO, hotActions));
            Map<String, LifecycleAction> deleteActions =
                Collections.singletonMap(DeleteAction.NAME,
                    new DeleteAction());
            phases.put("delete",
                new Phase("delete",
                    new TimeValue(90, TimeUnit.DAYS), deleteActions));
            myPolicyAsPut = new LifecyclePolicy("my_policy", phases);
            PutLifecyclePolicyRequest putRequest = new PutLifecyclePolicyRequest(myPolicyAsPut);
            // other_policy reuses my_policy's phases and adds a warm phase with a shrink action
            Map<String, Phase> otherPolicyPhases = new HashMap<>(phases);
            Map<String, LifecycleAction> warmActions = Collections.singletonMap(ShrinkAction.NAME, new ShrinkAction(1));
            otherPolicyPhases.put("warm", new Phase("warm", new TimeValue(30, TimeUnit.DAYS), warmActions));
            otherPolicyAsPut = new LifecyclePolicy("other_policy", otherPolicyPhases);
            PutLifecyclePolicyRequest putRequest2 = new PutLifecyclePolicyRequest(otherPolicyAsPut);
            AcknowledgedResponse putResponse = client.indexLifecycle().
                putLifecyclePolicy(putRequest, RequestOptions.DEFAULT);
            assertTrue(putResponse.isAcknowledged());
            AcknowledgedResponse putResponse2 = client.indexLifecycle().
                putLifecyclePolicy(putRequest2, RequestOptions.DEFAULT);
            assertTrue(putResponse2.isAcknowledged());
        }
        // tag::ilm-get-lifecycle-policy-request
        GetLifecyclePolicyRequest allRequest =
            new GetLifecyclePolicyRequest(); // <1>
        GetLifecyclePolicyRequest request =
            new GetLifecyclePolicyRequest("my_policy", "other_policy"); // <2>
        // end::ilm-get-lifecycle-policy-request
        // tag::ilm-get-lifecycle-policy-execute
        GetLifecyclePolicyResponse response = client.indexLifecycle()
            .getLifecyclePolicy(request, RequestOptions.DEFAULT);
        // end::ilm-get-lifecycle-policy-execute
        // tag::ilm-get-lifecycle-policy-response
        ImmutableOpenMap<String, LifecyclePolicyMetadata> policies =
            response.getPolicies();
        LifecyclePolicyMetadata myPolicyMetadata =
            policies.get("my_policy"); // <1>
        String myPolicyName = myPolicyMetadata.getName();
        long version = myPolicyMetadata.getVersion();
        String lastModified = myPolicyMetadata.getModifiedDateString();
        long lastModifiedDate = myPolicyMetadata.getModifiedDate();
        LifecyclePolicy myPolicy = myPolicyMetadata.getPolicy(); // <2>
        // end::ilm-get-lifecycle-policy-response
        // The returned policies and metadata should match what was put above
        assertEquals(myPolicyAsPut, myPolicy);
        assertEquals("my_policy", myPolicyName);
        assertNotNull(lastModified);
        assertNotEquals(0, lastModifiedDate);
        LifecyclePolicyMetadata otherPolicyMetadata = policies.get("other_policy");
        assertEquals(otherPolicyAsPut, otherPolicyMetadata.getPolicy());
        assertEquals("other_policy", otherPolicyMetadata.getName());
        assertNotNull(otherPolicyMetadata.getModifiedDateString());
        assertNotEquals(0, otherPolicyMetadata.getModifiedDate());
        // tag::ilm-get-lifecycle-policy-execute-listener
        ActionListener<GetLifecyclePolicyResponse> listener =
            new ActionListener<GetLifecyclePolicyResponse>() {
                @Override
                public void onResponse(GetLifecyclePolicyResponse response)
                {
                    ImmutableOpenMap<String, LifecyclePolicyMetadata>
                        policies = response.getPolicies(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-get-lifecycle-policy-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-get-lifecycle-policy-execute-async
        client.indexLifecycle().getLifecyclePolicyAsync(request,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-get-lifecycle-policy-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation test for the ILM start/stop/status APIs.
     *
     * <p>Stops ILM, checks the reported operation mode, restarts it, runs the
     * async status call, and finally verifies ILM is RUNNING again. Code
     * between {@code tag::}/{@code end::} markers is extracted verbatim into
     * the reference documentation.
     */
    public void testStartStopStatus() throws Exception {
        RestHighLevelClient client = highLevelClient();
        stopILM(client);
        // tag::ilm-status-request
        LifecycleManagementStatusRequest request =
            new LifecycleManagementStatusRequest();
        // end::ilm-status-request
        // Check that ILM has stopped
        {
            // tag::ilm-status-execute
            LifecycleManagementStatusResponse response =
                client.indexLifecycle()
                    .lifecycleManagementStatus(request, RequestOptions.DEFAULT);
            // end::ilm-status-execute
            // tag::ilm-status-response
            OperationMode operationMode = response.getOperationMode(); // <1>
            // end::ilm-status-response
            // the stop may not have fully completed yet, so accept STOPPING as well as STOPPED
            assertThat(operationMode, Matchers.either(equalTo(OperationMode.STOPPING)).or(equalTo(OperationMode.STOPPED)));
        }
        startILM(client);
        // tag::ilm-status-execute-listener
        ActionListener<LifecycleManagementStatusResponse> listener =
            new ActionListener<LifecycleManagementStatusResponse>() {
                @Override
                public void onResponse(
                        LifecycleManagementStatusResponse response) {
                    OperationMode operationMode = response
                        .getOperationMode(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-status-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-status-execute-async
        client.indexLifecycle().lifecycleManagementStatusAsync(request,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-status-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
        // Check that ILM is running again
        LifecycleManagementStatusResponse response =
            client.indexLifecycle()
                .lifecycleManagementStatus(request, RequestOptions.DEFAULT);
        OperationMode operationMode = response.getOperationMode();
        assertEquals(OperationMode.RUNNING, operationMode);
    }
    /**
     * Stops index lifecycle management and documents the ILM stop API,
     * synchronously and asynchronously. Also serves as setup for
     * {@link #testStartStopStatus()}. Code between {@code tag::}/{@code end::}
     * markers is extracted verbatim into the reference documentation.
     *
     * @param client high-level client pointed at the test cluster
     */
    private void stopILM(RestHighLevelClient client) throws IOException, InterruptedException {
        // tag::ilm-stop-ilm-request
        StopILMRequest request = new StopILMRequest();
        // end::ilm-stop-ilm-request
        // tag::ilm-stop-ilm-execute
        AcknowledgedResponse response = client.indexLifecycle()
            .stopILM(request, RequestOptions.DEFAULT);
        // end::ilm-stop-ilm-execute
        // tag::ilm-stop-ilm-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ilm-stop-ilm-response
        assertTrue(acknowledged);
        // tag::ilm-stop-ilm-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-stop-ilm-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-stop-ilm-execute-async
        client.indexLifecycle().stopILMAsync(request,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-stop-ilm-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Restarts index lifecycle management and documents the ILM start API,
     * synchronously and asynchronously. Also serves as setup for
     * {@link #testStartStopStatus()}. Code between {@code tag::}/{@code end::}
     * markers is extracted verbatim into the reference documentation.
     *
     * @param client high-level client pointed at the test cluster
     */
    private void startILM(RestHighLevelClient client) throws IOException, InterruptedException {
        // tag::ilm-start-ilm-request
        StartILMRequest request1 = new StartILMRequest();
        // end::ilm-start-ilm-request
        // tag::ilm-start-ilm-execute
        AcknowledgedResponse response = client.indexLifecycle()
            .startILM(request1, RequestOptions.DEFAULT);
        // end::ilm-start-ilm-execute
        // tag::ilm-start-ilm-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ilm-start-ilm-response
        assertTrue(acknowledged);
        // tag::ilm-start-ilm-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-start-ilm-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-start-ilm-execute-async
        client.indexLifecycle().startILMAsync(request1,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-start-ilm-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation test for the ILM retry-lifecycle-policy API.
     *
     * <p>Creates a policy and an index that is expected to fail an ILM step
     * (see the inline setup comment), waits until a failed step is reported,
     * then demonstrates the retry call synchronously and asynchronously. Code
     * between {@code tag::}/{@code end::} markers is extracted verbatim into
     * the reference documentation.
     */
    public void testRetryPolicy() throws Exception {
        RestHighLevelClient client = highLevelClient();
        // setup policy to immediately fail on index
        {
            Map<String, Phase> phases = new HashMap<>();
            Map<String, LifecycleAction> warmActions = new HashMap<>();
            warmActions.put(ShrinkAction.NAME, new ShrinkAction(1));
            phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions));
            LifecyclePolicy policy = new LifecyclePolicy("my_policy",
                phases);
            PutLifecyclePolicyRequest putRequest =
                new PutLifecyclePolicyRequest(policy);
            client.indexLifecycle().putLifecyclePolicy(putRequest, RequestOptions.DEFAULT);
            CreateIndexRequest createIndexRequest = new CreateIndexRequest("my_index",
                Settings.builder()
                    .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                    .put("index.lifecycle.name", "my_policy")
                    .build());
            client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
            // wait until ILM actually records a failed step for the index,
            // otherwise there is nothing for the retry request to retry
            assertBusy(() -> assertNotNull(client.indexLifecycle()
                .explainLifecycle(new ExplainLifecycleRequest().indices("my_index"), RequestOptions.DEFAULT)
                .getIndexResponses().get("my_index").getFailedStep()));
        }
        // tag::ilm-retry-lifecycle-policy-request
        RetryLifecyclePolicyRequest request =
            new RetryLifecyclePolicyRequest("my_index"); // <1>
        // end::ilm-retry-lifecycle-policy-request
        // tag::ilm-retry-lifecycle-policy-execute
        AcknowledgedResponse response = client.indexLifecycle()
            .retryLifecyclePolicy(request, RequestOptions.DEFAULT);
        // end::ilm-retry-lifecycle-policy-execute
        // tag::ilm-retry-lifecycle-policy-response
        boolean acknowledged = response.isAcknowledged(); // <1>
        // end::ilm-retry-lifecycle-policy-response
        assertTrue(acknowledged);
        // tag::ilm-retry-lifecycle-policy-execute-listener
        ActionListener<AcknowledgedResponse> listener =
            new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    boolean acknowledged = response.isAcknowledged(); // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::ilm-retry-lifecycle-policy-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::ilm-retry-lifecycle-policy-execute-async
        client.indexLifecycle().retryLifecyclePolicyAsync(request,
            RequestOptions.DEFAULT, listener); // <1>
        // end::ilm-retry-lifecycle-policy-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
static Map<String, Object> toMap(Response response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
}

View File

@ -35,17 +35,22 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
@ -70,10 +75,13 @@ import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostCalendarEventResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
@ -82,6 +90,8 @@ import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
@ -89,11 +99,16 @@ import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotResponse;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
import org.elasticsearch.client.ml.job.config.DataDescription;
@ -582,6 +597,14 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
// end::put-datafeed-config-set-query-delay
// tag::put-datafeed-config-set-delayed-data-check-config
datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig
.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1))); // <1>
// end::put-datafeed-config-set-delayed-data-check-config
// no need to accidentally trip internal validations due to job bucket size
datafeedBuilder.setDelayedDataCheckConfig(null);
List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
// tag::put-datafeed-config-set-script-fields
datafeedBuilder.setScriptFields(scriptFields); // <1>
@ -1867,6 +1890,73 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
/**
 * Documentation integration test for the ML delete-model-snapshot API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets that are
 * included verbatim in the reference docs — do not reformat the code between them.
 * Covers both the synchronous call and the async listener variant.
 */
public void testDeleteModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-delete-model-snapshot";
String snapshotId = "1541587919";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
// Builds the raw model-snapshot JSON document directly into the results index
// so the delete API has something to operate on.
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
{
// Synchronous variant: index the snapshot, delete it, assert acknowledgement.
client.index(indexRequest, RequestOptions.DEFAULT);
// tag::delete-model-snapshot-request
DeleteModelSnapshotRequest request = new DeleteModelSnapshotRequest(jobId, snapshotId); // <1>
// end::delete-model-snapshot-request
// tag::delete-model-snapshot-execute
AcknowledgedResponse response = client.machineLearning().deleteModelSnapshot(request, RequestOptions.DEFAULT);
// end::delete-model-snapshot-execute
// tag::delete-model-snapshot-response
boolean isAcknowledged = response.isAcknowledged(); // <1>
// end::delete-model-snapshot-response
assertTrue(isAcknowledged);
}
{
// Async variant: re-index the snapshot (the first branch deleted it).
client.index(indexRequest, RequestOptions.DEFAULT);
// tag::delete-model-snapshot-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, "1541587919");
// tag::delete-model-snapshot-execute-async
client.machineLearning().deleteModelSnapshotAsync(deleteModelSnapshotRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetModelSnapshots() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
@@ -1963,6 +2053,158 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
/**
 * Documentation integration test for the ML revert-model-snapshot API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testRevertModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-revert-model-snapshot";
String snapshotId = "1541587919";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
// Raw model-snapshot JSON (including quantiles) written straight into the
// results index so the revert API has a snapshot to restore.
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc", documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false, " +
"\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
"\"quantile_state\":\"state\"}}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// Synchronous variant: revert and check the returned snapshot metadata.
// tag::revert-model-snapshot-request
RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // <1>
// end::revert-model-snapshot-request
// tag::revert-model-snapshot-delete-intervening-results
request.setDeleteInterveningResults(true); // <1>
// end::revert-model-snapshot-delete-intervening-results
// tag::revert-model-snapshot-execute
RevertModelSnapshotResponse response = client.machineLearning().revertModelSnapshot(request, RequestOptions.DEFAULT);
// end::revert-model-snapshot-execute
// tag::revert-model-snapshot-response
ModelSnapshot modelSnapshot = response.getModel(); // <1>
// end::revert-model-snapshot-response
assertEquals(snapshotId, modelSnapshot.getSnapshotId());
assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000", modelSnapshot.getDescription());
assertEquals(51722, modelSnapshot.getModelSizeStats().getModelBytes());
}
{
// Async variant with a latched listener so the test blocks until completion.
RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId);
// tag::revert-model-snapshot-execute-listener
ActionListener<RevertModelSnapshotResponse> listener =
new ActionListener<RevertModelSnapshotResponse>() {
@Override
public void onResponse(RevertModelSnapshotResponse revertModelSnapshotResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::revert-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::revert-model-snapshot-execute-async
client.machineLearning().revertModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::revert-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Documentation integration test for the ML update-model-snapshot API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testUpdateModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-update-model-snapshot";
String snapshotId = "1541587919";
String documentId = jobId + "_model_snapshot_" + snapshotId;
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
// Raw model-snapshot JSON written straight into the results index so the
// update API has a snapshot document to modify.
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc", documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// Synchronous variant: update description/retain flag and verify the result.
// tag::update-model-snapshot-request
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId); // <1>
// end::update-model-snapshot-request
// tag::update-model-snapshot-description
request.setDescription("My Snapshot"); // <1>
// end::update-model-snapshot-description
// tag::update-model-snapshot-retain
request.setRetain(true); // <1>
// end::update-model-snapshot-retain
// tag::update-model-snapshot-execute
UpdateModelSnapshotResponse response = client.machineLearning().updateModelSnapshot(request, RequestOptions.DEFAULT);
// end::update-model-snapshot-execute
// tag::update-model-snapshot-response
boolean acknowledged = response.getAcknowledged(); // <1>
ModelSnapshot modelSnapshot = response.getModel(); // <2>
// end::update-model-snapshot-response
assertTrue(acknowledged);
assertEquals("My Snapshot", modelSnapshot.getDescription()); } // NOTE(review): closing brace shares the line — consider moving it to its own line
{
// Async variant with a latched listener so the test blocks until completion.
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);
// tag::update-model-snapshot-execute-listener
ActionListener<UpdateModelSnapshotResponse> listener =
new ActionListener<UpdateModelSnapshotResponse>() {
@Override
public void onResponse(UpdateModelSnapshotResponse updateModelSnapshotResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-model-snapshot-execute-async
client.machineLearning().updateModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::update-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testPutCalendar() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
@ -2005,6 +2247,112 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
/**
 * Documentation integration test for the ML put-calendar-job API (adding jobs
 * and job groups to an existing calendar). The {@code // tag::}/{@code // end::}
 * markers delimit snippets included verbatim in the reference docs.
 */
public void testPutCalendarJob() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
// Seed a calendar that initially contains only job_1.
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// Synchronous variant: add job_2 and job_group_1, then verify membership.
// tag::put-calendar-job-request
PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", // <1>
"job_2", "job_group_1"); // <2>
// end::put-calendar-job-request
// tag::put-calendar-job-execute
PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT);
// end::put-calendar-job-execute
// tag::put-calendar-job-response
Calendar updatedCalendar = response.getCalendar(); // <1>
// end::put-calendar-job-response
assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_1", "job_2", "job_group_1"));
}
{
// Async variant with a latched listener so the test blocks until completion.
PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", "job_4");
// tag::put-calendar-job-execute-listener
ActionListener<PutCalendarResponse> listener =
new ActionListener<PutCalendarResponse>() {
@Override
public void onResponse(PutCalendarResponse putCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::put-calendar-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::put-calendar-job-execute-async
client.machineLearning().putCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::put-calendar-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Documentation integration test for the ML delete-calendar-job API (removing
 * jobs and job groups from a calendar). The {@code // tag::}/{@code // end::}
 * markers delimit snippets included verbatim in the reference docs.
 */
public void testDeleteCalendarJob() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
// Seed a calendar with three members so both branches have something to remove.
Calendar calendar = new Calendar("holidays",
Arrays.asList("job_1", "job_group_1", "job_2"),
"A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// Synchronous variant: remove job_1 and job_group_1, leaving only job_2.
// tag::delete-calendar-job-request
DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", // <1>
"job_1", "job_group_1"); // <2>
// end::delete-calendar-job-request
// tag::delete-calendar-job-execute
PutCalendarResponse response = client.machineLearning().deleteCalendarJob(request, RequestOptions.DEFAULT);
// end::delete-calendar-job-execute
// tag::delete-calendar-job-response
Calendar updatedCalendar = response.getCalendar(); // <1>
// end::delete-calendar-job-response
assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_2"));
}
{
// Async variant with a latched listener so the test blocks until completion.
DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", "job_2");
// tag::delete-calendar-job-execute-listener
ActionListener<PutCalendarResponse> listener =
new ActionListener<PutCalendarResponse>() {
@Override
public void onResponse(PutCalendarResponse deleteCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-calendar-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-calendar-job-execute-async
client.machineLearning().deleteCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::delete-calendar-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testGetCalendar() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
@ -2113,6 +2461,136 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
/**
 * Documentation integration test for the ML get-calendar-events API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testGetCalendarEvent() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
// Seed a calendar and post a single scheduled event to query back.
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
{
// The optional filters below are set only so they appear in the docs
// snippets, then reset to null before the request actually executes.
// tag::get-calendar-events-request
GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // <1>
// end::get-calendar-events-request
// tag::get-calendar-events-page
request.setPageParams(new PageParams(10, 20)); // <1>
// end::get-calendar-events-page
// tag::get-calendar-events-start
request.setStart("2018-08-01T00:00:00Z"); // <1>
// end::get-calendar-events-start
// tag::get-calendar-events-end
request.setEnd("2018-08-02T00:00:00Z"); // <1>
// end::get-calendar-events-end
// tag::get-calendar-events-jobid
request.setJobId("job_1"); // <1>
// end::get-calendar-events-jobid
// reset params
request.setPageParams(null);
request.setJobId(null);
request.setStart(null);
request.setEnd(null);
// tag::get-calendar-events-execute
GetCalendarEventsResponse response = client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT);
// end::get-calendar-events-execute
// tag::get-calendar-events-response
long count = response.count(); // <1>
List<ScheduledEvent> scheduledEvents = response.events(); // <2>
// end::get-calendar-events-response
assertEquals(1, scheduledEvents.size());
}
{
// Async variant with a latched listener so the test blocks until completion.
GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays");
// tag::get-calendar-events-execute-listener
ActionListener<GetCalendarEventsResponse> listener =
new ActionListener<GetCalendarEventsResponse>() {
@Override
public void onResponse(GetCalendarEventsResponse getCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-calendar-events-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-calendar-events-execute-async
client.machineLearning().getCalendarEventsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-calendar-events-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Documentation integration test for the ML post-calendar-event API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testPostCalendarEvent() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
// Seed the calendar the events will be posted to.
Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
{
// Synchronous variant: post one event and verify it is echoed back.
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
// tag::post-calendar-event-request
PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", // <1>
events); // <2>
// end::post-calendar-event-request
// tag::post-calendar-event-execute
PostCalendarEventResponse response = client.machineLearning().postCalendarEvent(request, RequestOptions.DEFAULT);
// end::post-calendar-event-execute
// tag::post-calendar-event-response
List<ScheduledEvent> scheduledEvents = response.getScheduledEvents(); // <1>
// end::post-calendar-event-response
assertEquals(1, scheduledEvents.size());
}
{
// Async variant with a latched listener so the test blocks until completion.
List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance());
PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events); // <1>
// tag::post-calendar-event-execute-listener
ActionListener<PostCalendarEventResponse> listener =
new ActionListener<PostCalendarEventResponse>() {
@Override
public void onResponse(PostCalendarEventResponse postCalendarsResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::post-calendar-event-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::post-calendar-event-execute-async
client.machineLearning().postCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::post-calendar-event-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testCreateFilter() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -2207,16 +2685,16 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::get-filters-execute-listener
ActionListener<GetFiltersResponse> listener = new ActionListener<GetFiltersResponse>() {
@Override
public void onResponse(GetFiltersResponse getfiltersResponse) {
// <1>
}
@Override
public void onResponse(GetFiltersResponse getfiltersResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-filters-execute-listener
// Replace the empty listener by a blocking listener in test
@ -2292,4 +2770,62 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Documentation integration test for the ML delete-filter API.
 * The {@code // tag::}/{@code // end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testDeleteFilter() throws Exception {
RestHighLevelClient client = highLevelClient();
String filterId = createFilter(client);
{
// Synchronous variant: delete the filter and assert acknowledgement.
// tag::delete-filter-request
DeleteFilterRequest request = new DeleteFilterRequest(filterId); // <1>
// end::delete-filter-request
// tag::delete-filter-execute
AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT);
// end::delete-filter-execute
// tag::delete-filter-response
boolean isAcknowledged = response.isAcknowledged(); // <1>
// end::delete-filter-response
assertTrue(isAcknowledged);
}
// Re-create the filter — the first branch just deleted it.
filterId = createFilter(client);
{
// Async variant with a latched listener so the test blocks until completion.
DeleteFilterRequest request = new DeleteFilterRequest(filterId);
// tag::delete-filter-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-filter-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-filter-execute-async
client.machineLearning().deleteFilterAsync(request, RequestOptions.DEFAULT, listener); //<1>
// end::delete-filter-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
/**
 * Creates the {@code my_safe_domains} ML filter used by the filter docs tests.
 *
 * @param client high-level client pointed at the test cluster
 * @return the id of the filter that was just created ({@code my_safe_domains})
 * @throws IOException if the put-filter call fails
 */
private String createFilter(RestHighLevelClient client) throws IOException {
    MlFilter filter = MlFilter.builder("my_safe_domains")
        .setDescription("A list of safe domains")
        .setItems("*.google.com", "wikipedia.org")
        .build();
    PutFilterResponse response = client.machineLearning()
        .putFilter(new PutFilterRequest(filter), RequestOptions.DEFAULT);
    MlFilter created = response.getResponse();
    // The server echoes the filter back; sanity-check the id before returning it.
    assertThat(created.getId(), equalTo("my_safe_domains"));
    return created.getId();
}
}

View File

@ -32,8 +32,8 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.RollupClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.rollup.DeleteRollupJobRequest;
import org.elasticsearch.client.rollup.DeleteRollupJobResponse;
import org.elasticsearch.client.rollup.GetRollupCapsRequest;
import org.elasticsearch.client.rollup.GetRollupCapsResponse;
import org.elasticsearch.client.rollup.GetRollupIndexCapsRequest;
@ -44,7 +44,6 @@ import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
import org.elasticsearch.client.rollup.PutRollupJobRequest;
import org.elasticsearch.client.rollup.PutRollupJobResponse;
import org.elasticsearch.client.rollup.RollableIndexCaps;
import org.elasticsearch.client.rollup.RollupJobCaps;
import org.elasticsearch.client.rollup.StartRollupJobRequest;
@ -148,7 +147,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
//end::x-pack-rollup-put-rollup-job-request
//tag::x-pack-rollup-put-rollup-job-execute
PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
//end::x-pack-rollup-put-rollup-job-execute
//tag::x-pack-rollup-put-rollup-job-response
@ -161,9 +160,9 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
RollupJobConfig config = new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout);
PutRollupJobRequest request = new PutRollupJobRequest(config);
// tag::x-pack-rollup-put-rollup-job-execute-listener
ActionListener<PutRollupJobResponse> listener = new ActionListener<PutRollupJobResponse>() {
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(PutRollupJobResponse response) {
public void onResponse(AcknowledgedResponse response) {
// <1>
}
@ -288,6 +287,8 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
String id = "job_1";
// tag::rollup-stop-job-request
StopRollupJobRequest request = new StopRollupJobRequest(id); // <1>
request.waitForCompletion(true); // <2>
request.timeout(TimeValue.timeValueSeconds(10)); // <3>
// end::rollup-stop-job-request
@ -354,7 +355,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
pageSize, groups, metrics, timeout);
PutRollupJobRequest request = new PutRollupJobRequest(config);
PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
boolean acknowledged = response.isAcknowledged();
//end::x-pack-rollup-get-rollup-caps-setup
@ -470,7 +471,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
pageSize, groups, metrics, timeout);
PutRollupJobRequest request = new PutRollupJobRequest(config);
PutRollupJobResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
AcknowledgedResponse response = client.rollup().putRollupJob(request, RequestOptions.DEFAULT);
boolean acknowledged = response.isAcknowledged();
//end::x-pack-rollup-get-rollup-index-caps-setup
@ -570,7 +571,7 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
// end::rollup-delete-job-request
try {
// tag::rollup-delete-job-execute
DeleteRollupJobResponse response = client.rollup().deleteRollupJob(request, RequestOptions.DEFAULT);
AcknowledgedResponse response = client.rollup().deleteRollupJob(request, RequestOptions.DEFAULT);
// end::rollup-delete-job-execute
// tag::rollup-delete-job-response
@ -581,9 +582,9 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
}
// tag::rollup-delete-job-execute-listener
ActionListener<DeleteRollupJobResponse> listener = new ActionListener<DeleteRollupJobResponse>() {
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(DeleteRollupJobResponse response) {
public void onResponse(AcknowledgedResponse response) {
boolean acknowledged = response.isAcknowledged(); // <1>
}

View File

@ -174,8 +174,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
sourceBuilder.fetchSource(false);
// end::search-source-filtering-off
// tag::search-source-filtering-includes
String[] includeFields = new String[] {"title", "user", "innerObject.*"};
String[] excludeFields = new String[] {"_type"};
String[] includeFields = new String[] {"title", "innerObject.*"};
String[] excludeFields = new String[] {"user"};
sourceBuilder.fetchSource(includeFields, excludeFields);
// end::search-source-filtering-includes
sourceBuilder.fetchSource(true);
@ -247,7 +247,6 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
for (SearchHit hit : searchHits) {
// tag::search-hits-singleHit-properties
String index = hit.getIndex();
String type = hit.getType();
String id = hit.getId();
float score = hit.getScore();
// end::search-hits-singleHit-properties
@ -263,8 +262,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
assertEquals(3, totalHits);
assertNotNull(hits.getHits()[0].getSourceAsString());
assertNotNull(hits.getHits()[0].getSourceAsMap().get("title"));
assertNotNull(hits.getHits()[0].getSourceAsMap().get("user"));
assertNotNull(hits.getHits()[0].getSourceAsMap().get("innerObject"));
assertNull(hits.getHits()[0].getSourceAsMap().get("user"));
}
}
@ -1242,18 +1241,6 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
{
// tag::multi-search-request-index
MultiSearchRequest request = new MultiSearchRequest();
request.add(new SearchRequest("posts") // <1>
.types("doc")); // <2>
// end::multi-search-request-index
MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT);
MultiSearchResponse.Item firstResponse = response.getResponses()[0];
assertNull(firstResponse.getFailure());
SearchResponse searchResponse = firstResponse.getResponse();
assertEquals(3, searchResponse.getHits().getTotalHits());
}
}
private void indexSearchTestData() throws IOException {
@ -1304,19 +1291,12 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
// end::count-request-basic
}
{
// tag::count-request-indices-types
CountRequest countRequest = new CountRequest("blog"); // <1>
countRequest.types("doc"); // <2>
// end::count-request-indices-types
// tag::count-request-routing
countRequest.routing("routing"); // <1>
// end::count-request-routing
// tag::count-request-indicesOptions
countRequest.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1>
// end::count-request-indicesOptions
// tag::count-request-preference
countRequest.preference("_local"); // <1>
// end::count-request-preference
// tag::count-request-args
CountRequest countRequest = new CountRequest("blog") // <1>
.routing("routing") // <2>
.indicesOptions(IndicesOptions.lenientExpandOpen()) // <3>
.preference("_local"); // <4>
// end::count-request-args
assertNotNull(client.count(countRequest, RequestOptions.DEFAULT));
}
{

View File

@ -51,6 +51,8 @@ import org.elasticsearch.client.security.ExpressionRoleMapping;
import org.elasticsearch.client.security.GetRoleMappingsRequest;
import org.elasticsearch.client.security.GetRoleMappingsResponse;
import org.elasticsearch.client.security.GetSslCertificatesResponse;
import org.elasticsearch.client.security.HasPrivilegesRequest;
import org.elasticsearch.client.security.HasPrivilegesResponse;
import org.elasticsearch.client.security.InvalidateTokenRequest;
import org.elasticsearch.client.security.InvalidateTokenResponse;
import org.elasticsearch.client.security.PutRoleMappingRequest;
@ -63,7 +65,9 @@ import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpress
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.client.security.user.User;
import org.elasticsearch.client.security.user.privileges.IndicesPrivileges;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;
import org.hamcrest.Matchers;
@ -80,6 +84,7 @@ import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isIn;
@ -437,6 +442,67 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
/**
 * Documentation integration test for the security has-privileges API.
 * The {@code //tag::}/{@code //end::} markers delimit snippets included
 * verbatim in the reference docs — do not reformat the code between them.
 */
public void testHasPrivileges() throws Exception {
RestHighLevelClient client = highLevelClient();
{
// Synchronous variant: check cluster and index privileges in one request.
//tag::has-privileges-request
HasPrivilegesRequest request = new HasPrivilegesRequest(
Sets.newHashSet("monitor", "manage"),
Sets.newHashSet(
IndicesPrivileges.builder().indices("logstash-2018-10-05").privileges("read", "write").build(),
IndicesPrivileges.builder().indices("logstash-2018-*").privileges("read").build()
),
null
);
//end::has-privileges-request
//tag::has-privileges-execute
HasPrivilegesResponse response = client.security().hasPrivileges(request, RequestOptions.DEFAULT);
//end::has-privileges-execute
//tag::has-privileges-response
boolean hasMonitor = response.hasClusterPrivilege("monitor"); // <1>
boolean hasWrite = response.hasIndexPrivilege("logstash-2018-10-05", "write"); // <2>
boolean hasRead = response.hasIndexPrivilege("logstash-2018-*", "read"); // <3>
//end::has-privileges-response
// NOTE(review): assumes the test cluster authenticates as "test_user" with
// full privileges — confirm against the test fixture if this starts failing.
assertThat(response.getUsername(), is("test_user"));
assertThat(response.hasAllRequested(), is(true));
assertThat(hasMonitor, is(true));
assertThat(hasWrite, is(true));
assertThat(hasRead, is(true));
assertThat(response.getApplicationPrivileges().entrySet(), emptyIterable());
}
{
// Async variant with a latched listener so the test blocks until completion.
HasPrivilegesRequest request = new HasPrivilegesRequest(Collections.singleton("monitor"),null,null);
// tag::has-privileges-execute-listener
ActionListener<HasPrivilegesResponse> listener = new ActionListener<HasPrivilegesResponse>() {
@Override
public void onResponse(HasPrivilegesResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::has-privileges-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::has-privileges-execute-async
client.security().hasPrivilegesAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::has-privileges-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testClearRealmCache() throws Exception {
RestHighLevelClient client = highLevelClient();
{

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for the argument validation performed by {@code DeleteCalendarJobRequest}.
 */
public class DeleteCalendarJobRequestTests extends ESTestCase {

    public void testWithNullId() {
        // A null calendar id must be rejected up front.
        NullPointerException failure =
            expectThrows(NullPointerException.class, () -> new DeleteCalendarJobRequest(null, "job1"));
        assertEquals("[calendar_id] must not be null.", failure.getMessage());
    }

    public void testSetJobIds() {
        String calendarId = randomAlphaOfLength(10);

        // Null entries inside the job id list are not allowed.
        NullPointerException nullJob =
            expectThrows(NullPointerException.class, () -> new DeleteCalendarJobRequest(calendarId, "job1", null));
        assertEquals("jobIds must not contain null values.", nullJob.getMessage());

        // At least one job id must be supplied.
        IllegalArgumentException emptyJobs =
            expectThrows(IllegalArgumentException.class, () -> new DeleteCalendarJobRequest(calendarId));
        assertEquals("jobIds must not be empty.", emptyJobs.getMessage());
    }
}

View File

@ -0,0 +1,35 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for {@code DeleteFilterRequest} construction and its accessor.
 */
public class DeleteFilterRequestTests extends ESTestCase {

    public void test_WithNullFilter() {
        // The filter id is mandatory; a null must fail fast.
        NullPointerException failure =
            expectThrows(NullPointerException.class, () -> new DeleteFilterRequest(null));
        assertEquals("[filter_id] is required", failure.getMessage());
    }

    public void test_instance() {
        String id = randomAlphaOfLengthBetween(2, 10);
        // The id handed to the constructor is returned unchanged by the getter.
        assertEquals(new DeleteFilterRequest(id).getId(), id);
    }
}

View File

@ -0,0 +1,40 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for the argument validation performed by {@code DeleteModelSnapshotRequest}.
 */
public class DeleteModelSnapshotRequestTests extends ESTestCase {

    public void test_WithNullJobId() {
        // The job id is required; a null must be rejected at construction time.
        NullPointerException ex = expectThrows(NullPointerException.class, () ->
            new DeleteModelSnapshotRequest(null, randomAlphaOfLength(10)));
        assertEquals("[job_id] must not be null", ex.getMessage());
    }

    public void test_WithNullSnapshotId() {
        // The snapshot id is equally mandatory.
        NullPointerException ex = expectThrows(NullPointerException.class, () ->
            new DeleteModelSnapshotRequest(randomAlphaOfLength(10), null));
        assertEquals("[snapshot_id] must not be null", ex.getMessage());
    }

    // NOTE(review): removed the unused private createTestInstance() helper — this class
    // extends plain ESTestCase (not AbstractXContentTestCase), so nothing called it.
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
/**
 * XContent round-trip tests for {@code GetCalendarEventsRequest}.
 */
public class GetCalendarEventsRequestTests extends AbstractXContentTestCase<GetCalendarEventsRequest> {

    @Override
    protected GetCalendarEventsRequest createTestInstance() {
        GetCalendarEventsRequest request = new GetCalendarEventsRequest(randomAlphaOfLengthBetween(1, 10));
        // Each optional field is populated independently at random, in the same
        // order as before so a given seed still produces the same instance.
        if (randomBoolean()) {
            request.setPageParams(new PageParams(1, 2));
        }
        if (randomBoolean()) {
            request.setEnd(randomAlphaOfLength(10));
        }
        if (randomBoolean()) {
            request.setStart(randomAlphaOfLength(10));
        }
        if (randomBoolean()) {
            request.setJobId(randomAlphaOfLength(10));
        }
        return request;
    }

    @Override
    protected GetCalendarEventsRequest doParseInstance(XContentParser parser) {
        return GetCalendarEventsRequest.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // The request parser is strict about the fields it accepts.
        return false;
    }
}

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * XContent round-trip tests for {@code GetCalendarEventsResponse}.
 */
public class GetCalendarEventsResponseTests extends AbstractXContentTestCase<GetCalendarEventsResponse> {

    @Override
    protected GetCalendarEventsResponse createTestInstance() {
        String calendarId = randomAlphaOfLength(10);
        int total = randomIntBetween(0, 3);
        List<ScheduledEvent> events = new ArrayList<>(total);
        for (int i = 0; i < total; i++) {
            events.add(ScheduledEventTests.testInstance(calendarId, randomAlphaOfLength(10)));
        }
        // The reported count matches the number of generated events.
        return new GetCalendarEventsResponse(events, total);
    }

    @Override
    protected GetCalendarEventsResponse doParseInstance(XContentParser parser) throws IOException {
        return GetCalendarEventsResponse.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // Responses tolerate unknown fields for forward compatibility.
        return true;
    }
}

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * XContent round-trip tests for {@code PostCalendarEventRequest}.
 */
public class PostCalendarEventRequestTests extends AbstractXContentTestCase<PostCalendarEventRequest> {

    @Override
    protected PostCalendarEventRequest createTestInstance() {
        String calendarId = randomAlphaOfLength(10);
        int eventCount = randomIntBetween(1, 10);
        List<ScheduledEvent> events = new ArrayList<>(eventCount);
        for (int i = 0; i < eventCount; i++) {
            events.add(ScheduledEventTests.testInstance());
        }
        return new PostCalendarEventRequest(calendarId, events);
    }

    @Override
    protected PostCalendarEventRequest doParseInstance(XContentParser parser) throws IOException {
        return PostCalendarEventRequest.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // The request parser rejects fields it does not know about.
        return false;
    }
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * XContent round-trip tests for {@code PostCalendarEventResponse}.
 */
public class PostCalendarEventResponseTests extends AbstractXContentTestCase<PostCalendarEventResponse> {

    @Override
    protected PostCalendarEventResponse createTestInstance() {
        int eventCount = randomIntBetween(1, 10);
        List<ScheduledEvent> events = new ArrayList<>(eventCount);
        for (int i = 0; i < eventCount; i++) {
            events.add(ScheduledEventTests.testInstance());
        }
        return new PostCalendarEventResponse(events);
    }

    @Override
    protected PostCalendarEventResponse doParseInstance(XContentParser parser) throws IOException {
        return PostCalendarEventResponse.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // Responses tolerate unknown fields for forward compatibility.
        return true;
    }
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.test.ESTestCase;
/**
 * Tests for the argument validation performed by {@code PutCalendarJobRequest}.
 */
public class PutCalendarJobRequestTests extends ESTestCase {

    public void testWithNullId() {
        // A null calendar id must be rejected up front.
        NullPointerException failure =
            expectThrows(NullPointerException.class, () -> new PutCalendarJobRequest(null, "job1"));
        assertEquals("[calendar_id] must not be null.", failure.getMessage());
    }

    public void testSetJobIds() {
        String calendarId = randomAlphaOfLength(10);

        // Null entries inside the job id list are not allowed.
        NullPointerException nullJob =
            expectThrows(NullPointerException.class, () -> new PutCalendarJobRequest(calendarId, "job1", null));
        assertEquals("jobIds must not contain null values.", nullJob.getMessage());

        // At least one job id must be supplied.
        IllegalArgumentException emptyJobs =
            expectThrows(IllegalArgumentException.class, () -> new PutCalendarJobRequest(calendarId));
        assertEquals("jobIds must not be empty.", emptyJobs.getMessage());
    }
}

View File

@ -16,31 +16,31 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
package org.elasticsearch.client.ml;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import java.io.IOException;
public class PutRollupJobResponseTests extends AbstractXContentTestCase<PutRollupJobResponse> {
private boolean acknowledged;
public class RevertModelSnapshotRequestTests extends AbstractXContentTestCase<RevertModelSnapshotRequest> {
@Before
public void setupJobID() {
acknowledged = randomBoolean();
@Override
protected RevertModelSnapshotRequest createTestInstance() {
String jobId = randomAlphaOfLengthBetween(1, 20);
String snapshotId = randomAlphaOfLengthBetween(1, 20);
RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId);
if (randomBoolean()) {
request.setDeleteInterveningResults(randomBoolean());
}
return request;
}
@Override
protected PutRollupJobResponse createTestInstance() {
return new PutRollupJobResponse(acknowledged);
}
@Override
protected PutRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
return PutRollupJobResponse.fromXContent(parser);
protected RevertModelSnapshotRequest doParseInstance(XContentParser parser) throws IOException {
return RevertModelSnapshotRequest.PARSER.apply(parser, null);
}
@Override

View File

@ -16,36 +16,31 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
package org.elasticsearch.client.ml;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.process.ModelSnapshotTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import java.io.IOException;
public class DeleteRollupJobResponseTests extends AbstractXContentTestCase<DeleteRollupJobResponse> {
private boolean acknowledged;
public class RevertModelSnapshotResponseTests extends AbstractXContentTestCase<RevertModelSnapshotResponse> {
@Before
public void setupJobID() {
acknowledged = randomBoolean();
@Override
protected RevertModelSnapshotResponse createTestInstance() {
ModelSnapshot.Builder modelBuilder = ModelSnapshotTests.createRandomizedBuilder();
return new RevertModelSnapshotResponse(modelBuilder);
}
@Override
protected DeleteRollupJobResponse createTestInstance() {
return new DeleteRollupJobResponse(acknowledged);
}
@Override
protected DeleteRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
return DeleteRollupJobResponse.fromXContent(parser);
protected RevertModelSnapshotResponse doParseInstance(XContentParser parser) throws IOException {
return RevertModelSnapshotResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
return true;
}
}

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for {@code UpdateModelSnapshotRequest}.
 */
public class UpdateModelSnapshotRequestTests extends AbstractXContentTestCase<UpdateModelSnapshotRequest> {

    @Override
    protected UpdateModelSnapshotRequest createTestInstance() {
        // Constructor arguments evaluate left to right, matching the original
        // job-id-then-snapshot-id draw order from the random source.
        UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(
            randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
        if (randomBoolean()) {
            request.setDescription(String.valueOf(randomNonNegativeLong()));
        }
        if (randomBoolean()) {
            request.setRetain(randomBoolean());
        }
        return request;
    }

    @Override
    protected UpdateModelSnapshotRequest doParseInstance(XContentParser parser) throws IOException {
        return UpdateModelSnapshotRequest.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // The request parser rejects fields it does not know about.
        return false;
    }
}

View File

@ -16,24 +16,32 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.rollup;
package org.elasticsearch.client.ml;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.process.ModelSnapshotTests;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
public class PutRollupJobResponse extends AcknowledgedResponse {
public PutRollupJobResponse(boolean acknowledged) {
super(acknowledged);
public class UpdateModelSnapshotResponseTests extends AbstractXContentTestCase<UpdateModelSnapshotResponse> {
@Override
protected UpdateModelSnapshotResponse createTestInstance() {
Boolean acknowledged = randomBoolean();
ModelSnapshot.Builder modelBuilder = ModelSnapshotTests.createRandomizedBuilder();
return new UpdateModelSnapshotResponse(acknowledged, modelBuilder);
}
private static final ConstructingObjectParser<PutRollupJobResponse, Void> PARSER = AcknowledgedResponse
.generateParser("delete_rollup_job_response", PutRollupJobResponse::new, AcknowledgedResponse.PARSE_FIELD_NAME);
@Override
protected UpdateModelSnapshotResponse doParseInstance(XContentParser parser) throws IOException {
return UpdateModelSnapshotResponse.fromXContent(parser);
}
public static PutRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.client.ml.calendars;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
@ -26,12 +27,16 @@ import java.util.Date;
public class ScheduledEventTests extends AbstractXContentTestCase<ScheduledEvent> {
public static ScheduledEvent testInstance() {
public static ScheduledEvent testInstance(String calendarId, @Nullable String eventId) {
Date start = new Date(randomNonNegativeLong());
Date end = new Date(start.getTime() + randomIntBetween(1, 10000) * 1000);
return new ScheduledEvent(randomAlphaOfLength(10), start, end, randomAlphaOfLengthBetween(1, 20),
randomBoolean() ? null : randomAlphaOfLength(7));
return new ScheduledEvent(randomAlphaOfLength(10), start, end, calendarId, eventId);
}
public static ScheduledEvent testInstance() {
return testInstance(randomAlphaOfLengthBetween(1, 20),
randomBoolean() ? null : randomAlphaOfLength(7));
}
@Override

View File

@ -103,6 +103,9 @@ public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig
if (randomBoolean()) {
builder.setChunkingConfig(ChunkingConfigTests.createRandomizedChunk());
}
if (randomBoolean()) {
builder.setDelayedDataCheckConfig(DelayedDataCheckConfigTests.createRandomizedConfig());
}
return builder;
}

View File

@ -83,6 +83,9 @@ public class DatafeedUpdateTests extends AbstractXContentTestCase<DatafeedUpdate
if (randomBoolean()) {
builder.setChunkingConfig(ChunkingConfigTests.createRandomizedChunk());
}
if (randomBoolean()) {
builder.setDelayedDataCheckConfig(DelayedDataCheckConfigTests.createRandomizedConfig());
}
return builder.build();
}

View File

@ -0,0 +1,65 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.datafeed;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import static org.hamcrest.Matchers.equalTo;
/**
 * XContent round-trip and factory-method tests for {@code DelayedDataCheckConfig}.
 */
public class DelayedDataCheckConfigTests extends AbstractXContentTestCase<DelayedDataCheckConfig> {

    @Override
    protected DelayedDataCheckConfig createTestInstance() {
        return createRandomizedConfig();
    }

    @Override
    protected DelayedDataCheckConfig doParseInstance(XContentParser parser) {
        return DelayedDataCheckConfig.PARSER.apply(parser, null);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // Config parsing tolerates fields it does not know about.
        return true;
    }

    public void testEnabledDelayedDataCheckConfig() {
        DelayedDataCheckConfig config = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(5));
        assertThat(config.isEnabled(), equalTo(true));
        assertThat(config.getCheckWindow(), equalTo(TimeValue.timeValueHours(5)));
    }

    public void testDisabledDelayedDataCheckConfig() {
        DelayedDataCheckConfig config = DelayedDataCheckConfig.disabledDelayedDataCheckConfig();
        assertThat(config.isEnabled(), equalTo(false));
        assertThat(config.getCheckWindow(), equalTo(null));
    }

    /** Builds a random config; an enabled check always carries a window, a disabled one may omit it. */
    public static DelayedDataCheckConfig createRandomizedConfig() {
        boolean checkEnabled = randomBoolean();
        // Short-circuit evaluation preserves the original draw order from the random source.
        TimeValue window = (checkEnabled || randomBoolean())
            ? TimeValue.timeValueMillis(randomLongBetween(1, 1_000))
            : null;
        return new DelayedDataCheckConfig(checkEnabled, window);
    }
}

View File

@ -89,12 +89,6 @@ public class AnalysisConfigTests extends AbstractXContentTestCase<AnalysisConfig
if (randomBoolean()) {
builder.setMultivariateByFields(randomBoolean());
}
if (randomBoolean()) {
builder.setOverlappingBuckets(randomBoolean());
}
if (randomBoolean()) {
builder.setResultFinalizationWindow(randomNonNegativeLong());
}
builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false)));
return builder;

View File

@ -18,34 +18,25 @@
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import org.elasticsearch.client.core.AcknowledgedResponseTests;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class StartRollupJobResponseTests extends AbstractXContentTestCase<StartRollupJobResponse> {
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
private boolean acknowledged;
public class StartRollupJobResponseTests extends ESTestCase {
@Before
public void setupAcknoledged() {
acknowledged = randomBoolean();
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
this::createTestInstance,
AcknowledgedResponseTests::toXContent,
StartRollupJobResponse::fromXContent)
.supportsUnknownFields(false)
.test();
}
@Override
protected StartRollupJobResponse createTestInstance() {
return new StartRollupJobResponse(acknowledged);
}
@Override
protected StartRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
return StartRollupJobResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
private StartRollupJobResponse createTestInstance() {
return new StartRollupJobResponse(randomBoolean());
}
}

View File

@ -18,34 +18,25 @@
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.junit.Before;
import org.elasticsearch.client.core.AcknowledgedResponseTests;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class StopRollupJobResponseTests extends AbstractXContentTestCase<StopRollupJobResponse> {
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
private boolean acknowledged;
public class StopRollupJobResponseTests extends ESTestCase {
@Before
public void setupAcknoledged() {
acknowledged = randomBoolean();
public void testFromXContent() throws IOException {
xContentTester(this::createParser,
this::createTestInstance,
AcknowledgedResponseTests::toXContent,
StopRollupJobResponse::fromXContent)
.supportsUnknownFields(false)
.test();
}
@Override
protected StopRollupJobResponse createTestInstance() {
return new StopRollupJobResponse(acknowledged);
}
@Override
protected StopRollupJobResponse doParseInstance(XContentParser parser) throws IOException {
return StopRollupJobResponse.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return false;
private StopRollupJobResponse createTestInstance() {
return new StopRollupJobResponse(randomBoolean());
}
}

View File

@ -0,0 +1,111 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges;
import org.elasticsearch.client.security.user.privileges.IndicesPrivileges;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import org.elasticsearch.test.XContentTestUtils;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
/**
 * Tests for {@code HasPrivilegesRequest}: XContent serialization and the
 * equals/hashCode contract.
 */
public class HasPrivilegesRequestTests extends ESTestCase {

    /**
     * Serializes a fully-populated request and compares the resulting JSON, as a
     * map, against the expected structure (ignoring array ordering).
     */
    public void testToXContent() throws IOException {
        // LinkedHashSet keeps insertion order so the generated JSON arrays are deterministic.
        final HasPrivilegesRequest request = new HasPrivilegesRequest(
            new LinkedHashSet<>(Arrays.asList("monitor", "manage_watcher", "manage_ml")),
            new LinkedHashSet<>(Arrays.asList(
                IndicesPrivileges.builder().indices("index-001", "index-002").privileges("all").build(),
                IndicesPrivileges.builder().indices("index-003").privileges("read").build()
            )),
            new LinkedHashSet<>(Arrays.asList(
                new ApplicationResourcePrivileges("myapp", Arrays.asList("read", "write"), Arrays.asList("*")),
                new ApplicationResourcePrivileges("myapp", Arrays.asList("admin"), Arrays.asList("/data/*"))
            ))
        );
        String json = Strings.toString(request);
        // Compare as parsed maps rather than raw strings to avoid formatting noise.
        final Map<String, Object> parsed = XContentHelper.convertToMap(XContentType.JSON.xContent(), json, false);
        final Map<String, Object> expected = XContentHelper.convertToMap(XContentType.JSON.xContent(), "{" +
            " \"cluster\":[\"monitor\",\"manage_watcher\",\"manage_ml\"]," +
            " \"index\":[{" +
            "   \"names\":[\"index-001\",\"index-002\"]," +
            "   \"privileges\":[\"all\"]" +
            " },{" +
            "   \"names\":[\"index-003\"]," +
            "   \"privileges\":[\"read\"]" +
            " }]," +
            " \"application\":[{" +
            "   \"application\":\"myapp\"," +
            "   \"privileges\":[\"read\",\"write\"]," +
            "   \"resources\":[\"*\"]" +
            " },{" +
            "   \"application\":\"myapp\"," +
            "   \"privileges\":[\"admin\"]," +
            "   \"resources\":[\"/data/*\"]" +
            " }]" +
            "}", false);
        assertThat(XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder(parsed, expected), Matchers.nullValue());
    }

    /**
     * Checks the equals/hashCode contract using a randomized request, an exact
     * copy, and a mutation that nulls out one of the three privilege sets.
     */
    public void testEqualsAndHashCode() {
        final Set<String> cluster = Sets.newHashSet(randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)));
        final Set<IndicesPrivileges> indices = Sets.newHashSet(randomArray(1, 5, IndicesPrivileges[]::new,
            () -> IndicesPrivileges.builder()
                .indices(generateRandomStringArray(5, 12, false, false))
                .privileges(generateRandomStringArray(3, 8, false, false))
                .build()));
        final Set<ApplicationResourcePrivileges> application = Sets.newHashSet(randomArray(1, 5, ApplicationResourcePrivileges[]::new,
            () -> new ApplicationResourcePrivileges(
                randomAlphaOfLengthBetween(5, 12),
                Sets.newHashSet(generateRandomStringArray(3, 8, false, false)),
                Sets.newHashSet(generateRandomStringArray(2, 6, false, false))
            )));
        final HasPrivilegesRequest request = new HasPrivilegesRequest(cluster, indices, application);
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(request, this::copy, this::mutate);
    }

    // Field-for-field copy: equal to the original but not the same instance.
    private HasPrivilegesRequest copy(HasPrivilegesRequest request) {
        return new HasPrivilegesRequest(request.getClusterPrivileges(), request.getIndexPrivileges(), request.getApplicationPrivileges());
    }

    // Produces an unequal variant by nulling exactly one randomly-chosen privilege set.
    private HasPrivilegesRequest mutate(HasPrivilegesRequest request) {
        switch (randomIntBetween(1, 3)) {
            case 1:
                return new HasPrivilegesRequest(null, request.getIndexPrivileges(), request.getApplicationPrivileges());
            case 2:
                return new HasPrivilegesRequest(request.getClusterPrivileges(), null, request.getApplicationPrivileges());
            case 3:
                return new HasPrivilegesRequest(request.getClusterPrivileges(), request.getIndexPrivileges(), null);
        }
        // Unreachable: randomIntBetween(1, 3) only yields the three cases above.
        throw new IllegalStateException("The universe is broken (or the RNG is)");
    }
}

View File

@ -0,0 +1,262 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static java.util.Collections.emptyMap;
/**
 * Unit tests for {@code HasPrivilegesResponse}: parsing a JSON response body and
 * the behaviour of the {@code hasClusterPrivilege}/{@code hasIndexPrivilege}/
 * {@code hasApplicationPrivilege} convenience accessors, plus the
 * equals/hashCode contract.
 */
public class HasPrivilegesResponseTests extends ESTestCase {

    /** Parses a representative JSON response and verifies every accessor reflects it. */
    public void testParseValidResponse() throws IOException {
        String json = "{" +
            " \"username\": \"namor\"," +
            " \"has_all_requested\": false," +
            " \"cluster\" : {" +
            " \"manage\" : false," +
            " \"monitor\" : true" +
            " }," +
            " \"index\" : {" +
            " \"index-01\": {" +
            " \"read\" : true," +
            " \"write\" : false" +
            " }," +
            " \"index-02\": {" +
            " \"read\" : true," +
            " \"write\" : true" +
            " }," +
            " \"index-03\": {" +
            " \"read\" : false," +
            " \"write\" : false" +
            " }" +
            " }," +
            " \"application\" : {" +
            " \"app01\" : {" +
            " \"/object/1\" : {" +
            " \"read\" : true," +
            " \"write\" : false" +
            " }," +
            " \"/object/2\" : {" +
            " \"read\" : true," +
            " \"write\" : true" +
            " }" +
            " }," +
            " \"app02\" : {" +
            " \"/object/1\" : {" +
            " \"read\" : false," +
            " \"write\" : false" +
            " }," +
            " \"/object/3\" : {" +
            " \"read\" : false," +
            " \"write\" : true" +
            " }" +
            " }" +
            " }" +
        "}";
        final XContentParser parser = createParser(XContentType.JSON.xContent(), json);
        HasPrivilegesResponse response = HasPrivilegesResponse.fromXContent(parser);
        // Top-level fields.
        assertThat(response.getUsername(), Matchers.equalTo("namor"));
        assertThat(response.hasAllRequested(), Matchers.equalTo(false));
        // Cluster-level privileges.
        assertThat(response.getClusterPrivileges().keySet(), Matchers.containsInAnyOrder("monitor", "manage"));
        assertThat(response.hasClusterPrivilege("monitor"), Matchers.equalTo(true));
        assertThat(response.hasClusterPrivilege("manage"), Matchers.equalTo(false));
        // Index-level privileges, per index name.
        assertThat(response.getIndexPrivileges().keySet(), Matchers.containsInAnyOrder("index-01", "index-02", "index-03"));
        assertThat(response.hasIndexPrivilege("index-01", "read"), Matchers.equalTo(true));
        assertThat(response.hasIndexPrivilege("index-01", "write"), Matchers.equalTo(false));
        assertThat(response.hasIndexPrivilege("index-02", "read"), Matchers.equalTo(true));
        assertThat(response.hasIndexPrivilege("index-02", "write"), Matchers.equalTo(true));
        assertThat(response.hasIndexPrivilege("index-03", "read"), Matchers.equalTo(false));
        assertThat(response.hasIndexPrivilege("index-03", "write"), Matchers.equalTo(false));
        // Application privileges, per application/resource pair.
        assertThat(response.getApplicationPrivileges().keySet(), Matchers.containsInAnyOrder("app01", "app02"));
        assertThat(response.hasApplicationPrivilege("app01", "/object/1", "read"), Matchers.equalTo(true));
        assertThat(response.hasApplicationPrivilege("app01", "/object/1", "write"), Matchers.equalTo(false));
        assertThat(response.hasApplicationPrivilege("app01", "/object/2", "read"), Matchers.equalTo(true));
        assertThat(response.hasApplicationPrivilege("app01", "/object/2", "write"), Matchers.equalTo(true));
        assertThat(response.hasApplicationPrivilege("app02", "/object/1", "read"), Matchers.equalTo(false));
        assertThat(response.hasApplicationPrivilege("app02", "/object/1", "write"), Matchers.equalTo(false));
        assertThat(response.hasApplicationPrivilege("app02", "/object/3", "read"), Matchers.equalTo(false));
        assertThat(response.hasApplicationPrivilege("app02", "/object/3", "write"), Matchers.equalTo(true));
    }

    /**
     * {@code hasClusterPrivilege} returns the stored boolean for known privileges
     * and throws {@code IllegalArgumentException} for unknown ones.
     */
    public void testHasClusterPrivilege() {
        final Map<String, Boolean> cluster = MapBuilder.<String, Boolean>newMapBuilder()
            .put("a", true)
            .put("b", false)
            .put("c", false)
            .put("d", true)
            .map();
        final HasPrivilegesResponse response = new HasPrivilegesResponse("x", false, cluster, emptyMap(), emptyMap());
        assertThat(response.hasClusterPrivilege("a"), Matchers.is(true));
        assertThat(response.hasClusterPrivilege("b"), Matchers.is(false));
        assertThat(response.hasClusterPrivilege("c"), Matchers.is(false));
        assertThat(response.hasClusterPrivilege("d"), Matchers.is(true));
        // Unknown privilege name -> exception naming the privilege.
        final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> response.hasClusterPrivilege("e"));
        assertThat(iae.getMessage(), Matchers.containsString("[e]"));
        assertThat(iae.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("cluster privilege"));
    }

    /**
     * {@code hasIndexPrivilege} resolves index names literally (no wildcard
     * expansion — note the literal "i*" key) and throws for unknown
     * index names or privileges.
     */
    public void testHasIndexPrivilege() {
        final Map<String, Map<String, Boolean>> index = MapBuilder.<String, Map<String, Boolean>>newMapBuilder()
            .put("i1", Collections.singletonMap("read", true))
            .put("i2", Collections.singletonMap("read", false))
            .put("i3", MapBuilder.<String, Boolean>newMapBuilder().put("read", true).put("write", true).map())
            .put("i4", MapBuilder.<String, Boolean>newMapBuilder().put("read", true).put("write", false).map())
            .put("i*", MapBuilder.<String, Boolean>newMapBuilder().put("read", false).put("write", false).map())
            .map();
        final HasPrivilegesResponse response = new HasPrivilegesResponse("x", false, emptyMap(), index, emptyMap());
        assertThat(response.hasIndexPrivilege("i1", "read"), Matchers.is(true));
        assertThat(response.hasIndexPrivilege("i2", "read"), Matchers.is(false));
        assertThat(response.hasIndexPrivilege("i3", "read"), Matchers.is(true));
        assertThat(response.hasIndexPrivilege("i3", "write"), Matchers.is(true));
        assertThat(response.hasIndexPrivilege("i4", "read"), Matchers.is(true));
        assertThat(response.hasIndexPrivilege("i4", "write"), Matchers.is(false));
        assertThat(response.hasIndexPrivilege("i*", "read"), Matchers.is(false));
        assertThat(response.hasIndexPrivilege("i*", "write"), Matchers.is(false));
        // Unknown index -> exception naming the index.
        final IllegalArgumentException iae1 = expectThrows(IllegalArgumentException.class, () -> response.hasIndexPrivilege("i0", "read"));
        assertThat(iae1.getMessage(), Matchers.containsString("index [i0]"));
        // Known index but unknown privilege -> exception naming both.
        final IllegalArgumentException iae2 = expectThrows(IllegalArgumentException.class, () -> response.hasIndexPrivilege("i1", "write"));
        assertThat(iae2.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("privilege [write]"));
        assertThat(iae2.getMessage(), Matchers.containsString("index [i1]"));
    }

    /**
     * {@code hasApplicationPrivilege} looks up application, then resource, then
     * privilege, and throws for a miss at any of the three levels, naming all
     * parts it could not resolve.
     */
    public void testHasApplicationPrivilege() {
        final Map<String, Map<String, Boolean>> app1 = MapBuilder.<String, Map<String, Boolean>>newMapBuilder()
            .put("/data/1", Collections.singletonMap("read", true))
            .put("/data/2", Collections.singletonMap("read", false))
            .put("/data/3", MapBuilder.<String, Boolean>newMapBuilder().put("read", true).put("write", true).map())
            .put("/data/4", MapBuilder.<String, Boolean>newMapBuilder().put("read", true).put("write", false).map())
            .map();
        final Map<String, Map<String, Boolean>> app2 = MapBuilder.<String, Map<String, Boolean>>newMapBuilder()
            .put("/action/1", Collections.singletonMap("execute", true))
            .put("/action/*", Collections.singletonMap("execute", false))
            .map();
        Map<String, Map<String, Map<String, Boolean>>> appPrivileges = new HashMap<>();
        appPrivileges.put("a1", app1);
        appPrivileges.put("a2", app2);
        final HasPrivilegesResponse response = new HasPrivilegesResponse("x", false, emptyMap(), emptyMap(), appPrivileges);
        assertThat(response.hasApplicationPrivilege("a1", "/data/1", "read"), Matchers.is(true));
        assertThat(response.hasApplicationPrivilege("a1", "/data/2", "read"), Matchers.is(false));
        assertThat(response.hasApplicationPrivilege("a1", "/data/3", "read"), Matchers.is(true));
        assertThat(response.hasApplicationPrivilege("a1", "/data/3", "write"), Matchers.is(true));
        assertThat(response.hasApplicationPrivilege("a1", "/data/4", "read"), Matchers.is(true));
        assertThat(response.hasApplicationPrivilege("a1", "/data/4", "write"), Matchers.is(false));
        assertThat(response.hasApplicationPrivilege("a2", "/action/1", "execute"), Matchers.is(true));
        assertThat(response.hasApplicationPrivilege("a2", "/action/*", "execute"), Matchers.is(false));
        // Unknown application.
        final IllegalArgumentException iae1 = expectThrows(IllegalArgumentException.class,
            () -> response.hasApplicationPrivilege("a0", "/data/1", "read"));
        assertThat(iae1.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("application [a0]"));
        // Known application, unknown resource.
        final IllegalArgumentException iae2 = expectThrows(IllegalArgumentException.class,
            () -> response.hasApplicationPrivilege("a1", "/data/0", "read"));
        assertThat(iae2.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("application [a1]"));
        assertThat(iae2.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("resource [/data/0]"));
        // Resource exists, but under a different application.
        final IllegalArgumentException iae3 = expectThrows(IllegalArgumentException.class,
            () -> response.hasApplicationPrivilege("a1", "/action/1", "execute"));
        assertThat(iae3.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("application [a1]"));
        assertThat(iae3.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("resource [/action/1]"));
        // Application and resource exist, privilege does not.
        final IllegalArgumentException iae4 = expectThrows(IllegalArgumentException.class,
            () -> response.hasApplicationPrivilege("a1", "/data/1", "write"));
        assertThat(iae4.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("application [a1]"));
        assertThat(iae4.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("resource [/data/1]"));
        assertThat(iae4.getMessage().toLowerCase(Locale.ROOT), Matchers.containsString("privilege [write]"));
    }

    /** Verifies the equals/hashCode contract on a randomised response. */
    public void testEqualsAndHashCode() {
        final HasPrivilegesResponse response = randomResponse();
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(response, this::copy, this::mutate);
    }

    /** Field-for-field copy used by the equals/hashCode contract check. */
    private HasPrivilegesResponse copy(HasPrivilegesResponse response) {
        return new HasPrivilegesResponse(response.getUsername(),
            response.hasAllRequested(),
            response.getClusterPrivileges(),
            response.getIndexPrivileges(),
            response.getApplicationPrivileges());
    }

    /**
     * Returns a copy with exactly one of the five response fields changed, so
     * the result is never equal to the input.
     */
    private HasPrivilegesResponse mutate(HasPrivilegesResponse request) {
        switch (randomIntBetween(1, 5)) {
            case 1:
                return new HasPrivilegesResponse("_" + request.getUsername(), request.hasAllRequested(),
                    request.getClusterPrivileges(), request.getIndexPrivileges(), request.getApplicationPrivileges());
            case 2:
                return new HasPrivilegesResponse(request.getUsername(), request.hasAllRequested() == false,
                    request.getClusterPrivileges(), request.getIndexPrivileges(), request.getApplicationPrivileges());
            case 3:
                return new HasPrivilegesResponse(request.getUsername(), request.hasAllRequested(),
                    emptyMap(), request.getIndexPrivileges(), request.getApplicationPrivileges());
            case 4:
                return new HasPrivilegesResponse(request.getUsername(), request.hasAllRequested(),
                    request.getClusterPrivileges(), emptyMap(), request.getApplicationPrivileges());
            case 5:
                return new HasPrivilegesResponse(request.getUsername(), request.hasAllRequested(),
                    request.getClusterPrivileges(), request.getIndexPrivileges(), emptyMap());
        }
        throw new IllegalStateException("The universe is broken (or the RNG is)");
    }

    /** Builds a response with random username, flag and privilege maps. */
    private HasPrivilegesResponse randomResponse() {
        final Map<String, Boolean> cluster = randomPrivilegeMap();
        final Map<String, Map<String, Boolean>> index = randomResourceMap();
        final Map<String, Map<String, Map<String, Boolean>>> application = new HashMap<>();
        for (String app : randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 6).toLowerCase(Locale.ROOT))) {
            application.put(app, randomResourceMap());
        }
        return new HasPrivilegesResponse(randomAlphaOfLengthBetween(3, 8), randomBoolean(), cluster, index, application);
    }

    /** Random map of resource name -> privilege map. */
    private Map<String, Map<String, Boolean>> randomResourceMap() {
        final Map<String, Map<String, Boolean>> resource = new HashMap<>();
        for (String res : randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(5, 8))) {
            resource.put(res, randomPrivilegeMap());
        }
        return resource;
    }

    /** Random map of privilege name -> granted flag. */
    private Map<String, Boolean> randomPrivilegeMap() {
        final Map<String, Boolean> map = new HashMap<>();
        for (String privilege : randomArray(1, 6, String[]::new, () -> randomAlphaOfLengthBetween(3, 12))) {
            map.put(privilege, randomBoolean());
        }
        return map;
    }
}

View File

@ -1,116 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.xpack;
import org.elasticsearch.client.license.LicenseStatus;
import org.elasticsearch.client.xpack.XPackInfoResponse.BuildInfo;
import org.elasticsearch.client.xpack.XPackInfoResponse.FeatureSetsInfo;
import org.elasticsearch.client.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet;
import org.elasticsearch.client.xpack.XPackInfoResponse.LicenseInfo;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
public class XPackInfoResponseTests extends AbstractXContentTestCase<XPackInfoResponse> {
@Override
protected boolean supportsUnknownFields() {
return true;
}
protected XPackInfoResponse doParseInstance(XContentParser parser) throws IOException {
return XPackInfoResponse.fromXContent(parser);
}
protected Predicate<String> getRandomFieldsExcludeFilter() {
return path -> path.equals("features")
|| (path.startsWith("features") && path.endsWith("native_code_info"));
}
protected ToXContent.Params getToXContentParams() {
Map<String, String> params = new HashMap<>();
if (randomBoolean()) {
params.put("human", randomBoolean() ? "true" : "false");
}
if (randomBoolean()) {
params.put("categories", "_none");
}
return new ToXContent.MapParams(params);
}
protected XPackInfoResponse createTestInstance() {
return new XPackInfoResponse(
randomBoolean() ? null : randomBuildInfo(),
randomBoolean() ? null : randomLicenseInfo(),
randomBoolean() ? null : randomFeatureSetsInfo());
}
private BuildInfo randomBuildInfo() {
return new BuildInfo(
randomAlphaOfLength(10),
randomAlphaOfLength(15));
}
private LicenseInfo randomLicenseInfo() {
return new LicenseInfo(
randomAlphaOfLength(10),
randomAlphaOfLength(4),
randomAlphaOfLength(5),
randomFrom(LicenseStatus.values()),
randomLong());
}
private FeatureSetsInfo randomFeatureSetsInfo() {
int size = between(0, 10);
Set<FeatureSet> featureSets = new HashSet<>(size);
while (featureSets.size() < size) {
featureSets.add(randomFeatureSet());
}
return new FeatureSetsInfo(featureSets);
}
private FeatureSet randomFeatureSet() {
return new FeatureSet(
randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(20),
randomBoolean(),
randomBoolean(),
randomNativeCodeInfo());
}
private Map<String, Object> randomNativeCodeInfo() {
if (randomBoolean()) {
return null;
}
int size = between(0, 10);
Map<String, Object> nativeCodeInfo = new HashMap<>(size);
while (nativeCodeInfo.size() < size) {
nativeCodeInfo.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
}
return nativeCodeInfo;
}
}

View File

@ -112,6 +112,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
@ -726,7 +727,7 @@ public class InstallPluginCommandTests extends ESTestCase {
assertInstallCleaned(env.v2());
}
public void testOfficialPluginsHelpSorted() throws Exception {
public void testOfficialPluginsHelpSortedAndMissingObviouslyWrongPlugins() throws Exception {
MockTerminal terminal = new MockTerminal();
new InstallPluginCommand() {
@Override
@ -749,6 +750,9 @@ public class InstallPluginCommandTests extends ESTestCase {
assertTrue(prev + " < " + line, prev.compareTo(line) < 0);
prev = line;
line = reader.readLine();
// qa is not really a plugin and it shouldn't sneak in
assertThat(line, not(endsWith("qa")));
assertThat(line, not(endsWith("example")));
}
}
}

View File

@ -11,7 +11,7 @@
release-state can be: released | prerelease | unreleased
//////////
:release-state: unreleased
:release-state: prerelease
:issue: https://github.com/elastic/elasticsearch/issues/
:pull: https://github.com/elastic/elasticsearch/pull/

View File

@ -0,0 +1,35 @@
--
:api: ccr-resume-follow
:request: ResumeFollowRequest
:response: ResumeFollowResponse
--
[id="{upid}-{api}"]
=== Resume Follow API
[id="{upid}-{api}-request"]
==== Request
The Resume Follow API allows you to resume following a follower index that has been paused.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> The name of the follower index.
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ indicates if the resume follow request was received.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Whether or not the resume follow was acknowledged.
include::../execution.asciidoc[]

View File

@ -0,0 +1,36 @@
--
:api: ccr-unfollow
:request: UnfollowRequest
:response: UnfollowResponse
--
[id="{upid}-{api}"]
=== Unfollow API
[id="{upid}-{api}-request"]
==== Request
The Unfollow API allows you to unfollow a follower index and make it a regular index.
Note that the follower index needs to be paused and closed first.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> The name of the follower index to unfollow.
[id="{upid}-{api}-response"]
==== Response
The returned +{response}+ indicates if the unfollow request was received.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Whether or not the unfollow was acknowledged.
include::../execution.asciidoc[]

View File

@ -37,9 +37,9 @@ And different operation types can be added to the same +{request}+:
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-with-mixed-operations]
--------------------------------------------------
<1> Adds a `DeleteRequest` to the `BulkRequest`. See <<{upid}-delete>>
<1> Adds a `DeleteRequest` to the +{request}+. See <<{upid}-delete>>
for more information on how to build `DeleteRequest`.
<2> Adds an `UpdateRequest` to the `BulkRequest`. See <<{upid}-update>>
<2> Adds an `UpdateRequest` to the +{request}+. See <<{upid}-update>>
for more information on how to build `UpdateRequest`.
<3> Adds an `IndexRequest` using the SMILE format
@ -72,22 +72,22 @@ the index/update/delete operations.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-pipeline]
include-tagged::{doc-tests-file}[{api}-request-pipeline]
--------------------------------------------------
<1> Global pipelineId used on all sub requests, unless overridden on a sub request
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-routing]
include-tagged::{doc-tests-file}[{api}-request-routing]
--------------------------------------------------
<1> Global routingId used on all sub requests, unless overridden on a sub request
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[bulk-request-index-type]
include-tagged::{doc-tests-file}[{api}-request-index-type]
--------------------------------------------------
<1> A bulk request with global index and type used on all sub requests, unless overridden on a sub request.
Both parameters are @Nullable and can only be set during BulkRequest creation.
Both parameters are @Nullable and can only be set during +{request}+ creation.
include::../execution.asciidoc[]
@ -167,7 +167,7 @@ actions currently added (defaults to 1000, use -1 to disable it)
actions currently added (defaults to 5Mb, use -1 to disable it)
<3> Set the number of concurrent requests allowed to be executed
(default to 1, use 0 to only allow the execution of a single request)
<4> Set a flush interval flushing any `BulkRequest` pending if the
<4> Set a flush interval flushing any +{request}+ pending if the
interval passes (defaults to not set)
<5> Set a constant back off policy that initially waits for 1 second
and retries up to 3 times. See `BackoffPolicy.noBackoff()`,

View File

@ -0,0 +1,59 @@
--
:api: multi-term-vectors
:request: MultiTermVectorsRequest
:response: MultiTermVectorsResponse
:tvrequest: TermVectorsRequest
--
[id="{upid}-{api}"]
=== Multi Term Vectors API
The Multi Term Vectors API allows you to get multiple term vectors at once.
[id="{upid}-{api}-request"]
==== Multi Term Vectors Request
There are two ways to create a +{request}+.
The first way is to create an empty +{request}+, and then add individual
<<java-rest-high-document-term-vectors, term vectors requests>> to it.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request]
--------------------------------------------------
<1> Create an empty +{request}+.
<2> Add the first +{tvrequest}+ to the +{request}+.
<3> Add the second +{tvrequest}+ for an artificial doc to the +{request}+.
The second way can be used when all term vectors requests share the same
arguments, such as index, type, and other settings. In this case, a template
+{tvrequest}+ can be created with all necessary settings set, and
this template request can be passed to +{request}+ along with all
documents' ids for which to execute these requests.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-template]
--------------------------------------------------
<1> Create a template +{tvrequest}+.
<2> Pass documents' ids and the template to the +{request}+.
include::../execution.asciidoc[]
[id="{upid}-{api}-response"]
==== Multi Term Vectors Response
+{response}+ allows you to get the list of term vectors responses,
each of which can be inspected as described in
<<java-rest-high-document-term-vectors>>.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response]
--------------------------------------------------
<1> Get a list of `TermVectorsResponse`

Some files were not shown because too many files have changed in this diff Show More