mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-13 00:15:47 +00:00)

commit c87cff22b4
Merge remote-tracking branch 'origin/master' into index-lifecycle
@@ -1,7 +1,6 @@
 package com.carrotsearch.gradle.junit4
 
 import com.carrotsearch.ant.tasks.junit4.JUnit4
-import org.gradle.api.GradleException
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.Task
@@ -11,12 +10,8 @@ import org.gradle.api.tasks.TaskContainer
 import org.gradle.api.tasks.TaskProvider
 import org.gradle.api.tasks.testing.Test
 
-import java.util.concurrent.atomic.AtomicBoolean
-
 class RandomizedTestingPlugin implements Plugin<Project> {
 
-    static private AtomicBoolean sanityCheckConfigured = new AtomicBoolean(false)
-
     void apply(Project project) {
         setupSeed(project)
         replaceTestTask(project.tasks)
@@ -27,16 +22,10 @@ class RandomizedTestingPlugin implements Plugin<Project> {
     private static void configureSanityCheck(Project project) {
         // Check the task graph to confirm tasks were indeed replaced
         // https://github.com/elastic/elasticsearch/issues/31324
-        if (sanityCheckConfigured.getAndSet(true) == false) {
-            project.rootProject.getGradle().getTaskGraph().whenReady {
-                List<Task> nonConforming = project.getGradle().getTaskGraph().allTasks
-                    .findAll { it.name == "test" }
-                    .findAll { (it instanceof RandomizedTestingTask) == false}
-                    .collect { "${it.path} -> ${it.class}" }
-                if (nonConforming.isEmpty() == false) {
-                    throw new GradleException("Found the ${nonConforming.size()} `test` tasks:" +
-                        "\n ${nonConforming.join("\n ")}")
-                }
+        project.rootProject.getGradle().getTaskGraph().whenReady {
+            Task test = project.getTasks().findByName("test")
+            if (test != null && (test instanceof RandomizedTestingTask) == false) {
+                throw new IllegalStateException("Test task was not replaced in project ${project.path}. Found ${test.getClass()}")
             }
         }
     }
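The effect of this hunk: instead of one globally guarded scan of the whole task graph (hence the removed AtomicBoolean and GradleException imports above), each project now registers a whenReady callback that checks only its own "test" task. A minimal sketch of the new check, written against Gradle's Java API rather than the plugin's Groovy (the class name here is hypothetical; RandomizedTestingTask comes from the plugin's own sources):

    import org.gradle.api.Plugin;
    import org.gradle.api.Project;
    import org.gradle.api.Task;

    // Sketch: per-project sanity check registered once the task graph is final.
    class SanityCheckSketch implements Plugin<Project> {
        @Override
        public void apply(Project project) {
            project.getRootProject().getGradle().getTaskGraph().whenReady(graph -> {
                // Look up this project's "test" task, if it has one.
                Task test = project.getTasks().findByName("test");
                if (test != null && !(test instanceof RandomizedTestingTask)) {
                    throw new IllegalStateException("Test task was not replaced in project "
                        + project.getPath() + ". Found " + test.getClass());
                }
            });
        }
    }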
@@ -828,9 +828,6 @@ class BuildPlugin implements Plugin<Project> {
             // TODO: remove this once ctx isn't added to update script params in 7.0
             systemProperty 'es.scripting.update.ctx_in_params', 'false'
 
-            //TODO: remove this once the cname is prepended to the address by default in 7.0
-            systemProperty 'es.http.cname_in_publish_address', 'true'
-
             // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
             if (project.inFipsJvm) {
                 systemProperty 'javax.net.ssl.trustStorePassword', 'password'
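For context (not part of the commit): values set with Gradle's systemProperty become plain JVM system properties in the forked test JVM, so test code reads them with System.getProperty. A hypothetical sketch of the consuming side:

    // Hypothetical sketch: how test code might consume the properties configured above.
    public class BuildPropertiesSketch {
        static boolean ctxInParams() {
            // Set to 'false' by the build script above; defaults to true otherwise.
            return Boolean.parseBoolean(System.getProperty("es.scripting.update.ctx_in_params", "true"));
        }

        static char[] trustStorePassword() {
            // Only set when the build detects a FIPS-140 JVM.
            return System.getProperty("javax.net.ssl.trustStorePassword", "").toCharArray();
        }
    }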
@@ -62,7 +62,13 @@
   -->
   <module name="ModifierOrder" />
 
+  <!-- Checks that we don't include modifier where they are implied. For
+      example, this does not allow interface methods to be declared public
+      because they are *always* public. -->
   <module name="RedundantModifier" />
+
+  <!-- Checks that all java files have a package declaration and that it
+      lines up with the directory structure. -->
+  <module name="PackageDeclaration"/>
 
   <!-- We don't use Java's builtin serialization and we suppress all warning
     about it. The flip side of that coin is that we shouldn't _try_ to use
@@ -17,6 +17,10 @@
   <!-- the constructors on some local classes in these tests must be public-->
   <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="RedundantModifier" />
 
+  <!-- Intentionally doesn't have a package declaration to test logging
+      configuration of classes that aren't in packages. -->
+  <suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]Dummy.java" checks="PackageDeclaration" />
+
   <!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when we the
     files start to pass. -->
   <suppress files="client[/\\]rest[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]HeapBufferedAsyncResponseConsumerTests.java" checks="LineLength" />
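Taken together, the two checkstyle hunks above enable the PackageDeclaration check and suppress it for the one intentionally package-less test fixture. For illustration (not part of the commit), the check fails any Java source whose package statement is missing or doesn't line up with its directory:

    // Illustration: under PackageDeclaration, a file located at
    // src/main/java/org/elasticsearch/example/Widget.java must declare exactly
    // this package; omitting the declaration or declaring a different package
    // is now a checkstyle violation. (Path and class name here are hypothetical.)
    package org.elasticsearch.example;

    public class Widget {
    }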
@@ -1,5 +1,5 @@
 elasticsearch = 7.0.0-alpha1
-lucene = 8.0.0-snapshot-66c671ea80
+lucene = 8.0.0-snapshot-7d0a7782fa
 
 # optional dependencies
 spatial4j = 0.7
@@ -89,7 +89,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse delete(DeleteIndexRequest deleteIndexRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options,
+        return restHighLevelClient.performRequestAndParseEntity(deleteIndexRequest, IndicesRequestConverters::deleteIndex, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -102,7 +102,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void deleteAsync(DeleteIndexRequest deleteIndexRequest, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, IndicesRequestConverters::deleteIndex, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -116,7 +116,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public CreateIndexResponse create(CreateIndexRequest createIndexRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(createIndexRequest, RequestConverters::createIndex, options,
+        return restHighLevelClient.performRequestAndParseEntity(createIndexRequest, IndicesRequestConverters::createIndex, options,
             CreateIndexResponse::fromXContent, emptySet());
     }
 
@@ -129,7 +129,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void createAsync(CreateIndexRequest createIndexRequest, RequestOptions options, ActionListener<CreateIndexResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(createIndexRequest, RequestConverters::createIndex, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(createIndexRequest, IndicesRequestConverters::createIndex, options,
             CreateIndexResponse::fromXContent, listener, emptySet());
     }
 
@@ -143,7 +143,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse putMapping(PutMappingRequest putMappingRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, RequestConverters::putMapping, options,
+        return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -157,7 +157,7 @@ public final class IndicesClient {
      */
     public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions options,
                                 ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, RequestConverters::putMapping, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -171,7 +171,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetMappingsResponse getMapping(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options,
+        return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, IndicesRequestConverters::getMappings, options,
             GetMappingsResponse::fromXContent, emptySet());
     }
 
@@ -185,7 +185,7 @@ public final class IndicesClient {
      */
     public void getMappingAsync(GetMappingsRequest getMappingsRequest, RequestOptions options,
                                 ActionListener<GetMappingsResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, IndicesRequestConverters::getMappings, options,
             GetMappingsResponse::fromXContent, listener, emptySet());
     }
 
@@ -200,7 +200,7 @@ public final class IndicesClient {
      */
     public GetFieldMappingsResponse getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest,
                                                     RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options,
+        return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, IndicesRequestConverters::getFieldMapping, options,
             GetFieldMappingsResponse::fromXContent, emptySet());
     }
 
@@ -214,7 +214,7 @@ public final class IndicesClient {
      */
     public void getFieldMappingAsync(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options,
                                      ActionListener<GetFieldMappingsResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, IndicesRequestConverters::getFieldMapping, options,
             GetFieldMappingsResponse::fromXContent, listener, emptySet());
     }
 
@@ -228,7 +228,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse updateAliases(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options,
+        return restHighLevelClient.performRequestAndParseEntity(indicesAliasesRequest, IndicesRequestConverters::updateAliases, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -242,7 +242,7 @@ public final class IndicesClient {
      */
     public void updateAliasesAsync(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options,
                                    ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(indicesAliasesRequest, IndicesRequestConverters::updateAliases, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -256,7 +256,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public OpenIndexResponse open(OpenIndexRequest openIndexRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(openIndexRequest, RequestConverters::openIndex, options,
+        return restHighLevelClient.performRequestAndParseEntity(openIndexRequest, IndicesRequestConverters::openIndex, options,
             OpenIndexResponse::fromXContent, emptySet());
     }
 
@@ -269,7 +269,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void openAsync(OpenIndexRequest openIndexRequest, RequestOptions options, ActionListener<OpenIndexResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(openIndexRequest, RequestConverters::openIndex, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(openIndexRequest, IndicesRequestConverters::openIndex, options,
             OpenIndexResponse::fromXContent, listener, emptySet());
     }
 
@@ -283,7 +283,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options,
+        return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -296,7 +296,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -311,7 +311,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request
      */
     public boolean existsAlias(GetAliasesRequest getAliasesRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequest(getAliasesRequest, RequestConverters::existsAlias, options,
+        return restHighLevelClient.performRequest(getAliasesRequest, IndicesRequestConverters::existsAlias, options,
             RestHighLevelClient::convertExistsResponse, emptySet());
     }
 
@@ -324,7 +324,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void existsAliasAsync(GetAliasesRequest getAliasesRequest, RequestOptions options, ActionListener<Boolean> listener) {
-        restHighLevelClient.performRequestAsync(getAliasesRequest, RequestConverters::existsAlias, options,
+        restHighLevelClient.performRequestAsync(getAliasesRequest, IndicesRequestConverters::existsAlias, options,
             RestHighLevelClient::convertExistsResponse, listener, emptySet());
     }
 
@@ -337,7 +337,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public RefreshResponse refresh(RefreshRequest refreshRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(refreshRequest, RequestConverters::refresh, options,
+        return restHighLevelClient.performRequestAndParseEntity(refreshRequest, IndicesRequestConverters::refresh, options,
             RefreshResponse::fromXContent, emptySet());
     }
 
@@ -349,7 +349,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void refreshAsync(RefreshRequest refreshRequest, RequestOptions options, ActionListener<RefreshResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(refreshRequest, RequestConverters::refresh, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(refreshRequest, IndicesRequestConverters::refresh, options,
             RefreshResponse::fromXContent, listener, emptySet());
     }
 
@@ -362,7 +362,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public FlushResponse flush(FlushRequest flushRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(flushRequest, RequestConverters::flush, options,
+        return restHighLevelClient.performRequestAndParseEntity(flushRequest, IndicesRequestConverters::flush, options,
             FlushResponse::fromXContent, emptySet());
     }
 
@@ -374,7 +374,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void flushAsync(FlushRequest flushRequest, RequestOptions options, ActionListener<FlushResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(flushRequest, RequestConverters::flush, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(flushRequest, IndicesRequestConverters::flush, options,
             FlushResponse::fromXContent, listener, emptySet());
     }
 
@@ -388,7 +388,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public SyncedFlushResponse flushSynced(SyncedFlushRequest syncedFlushRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(syncedFlushRequest, RequestConverters::flushSynced, options,
+        return restHighLevelClient.performRequestAndParseEntity(syncedFlushRequest, IndicesRequestConverters::flushSynced, options,
             SyncedFlushResponse::fromXContent, emptySet());
     }
 
@@ -402,7 +402,7 @@ public final class IndicesClient {
      */
     public void flushSyncedAsync(SyncedFlushRequest syncedFlushRequest, RequestOptions options,
                                  ActionListener<SyncedFlushResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(syncedFlushRequest, RequestConverters::flushSynced, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(syncedFlushRequest, IndicesRequestConverters::flushSynced, options,
             SyncedFlushResponse::fromXContent, listener, emptySet());
     }
 
@@ -416,7 +416,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetSettingsResponse getSettings(GetSettingsRequest getSettingsRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, RequestConverters::getSettings, options,
+        return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, IndicesRequestConverters::getSettings, options,
             GetSettingsResponse::fromXContent, emptySet());
     }
 
@@ -430,7 +430,7 @@ public final class IndicesClient {
      */
     public void getSettingsAsync(GetSettingsRequest getSettingsRequest, RequestOptions options,
                                  ActionListener<GetSettingsResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, RequestConverters::getSettings, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, IndicesRequestConverters::getSettings, options,
             GetSettingsResponse::fromXContent, listener, emptySet());
     }
 
@@ -444,7 +444,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetIndexResponse get(GetIndexRequest getIndexRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, RequestConverters::getIndex, options,
+        return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, IndicesRequestConverters::getIndex, options,
             GetIndexResponse::fromXContent, emptySet());
     }
 
@@ -458,7 +458,7 @@ public final class IndicesClient {
      */
     public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options,
                          ActionListener<GetIndexResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, RequestConverters::getIndex, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, IndicesRequestConverters::getIndex, options,
             GetIndexResponse::fromXContent, listener, emptySet());
     }
 
@@ -487,7 +487,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ForceMergeResponse forcemerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options,
+        return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, IndicesRequestConverters::forceMerge, options,
             ForceMergeResponse::fromXContent, emptySet());
     }
 
@@ -514,7 +514,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void forcemergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener<ForceMergeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, IndicesRequestConverters::forceMerge, options,
             ForceMergeResponse::fromXContent, listener, emptySet());
     }
 
@@ -529,7 +529,7 @@ public final class IndicesClient {
      */
     public ClearIndicesCacheResponse clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest,
                                                 RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(clearIndicesCacheRequest, RequestConverters::clearCache, options,
+        return restHighLevelClient.performRequestAndParseEntity(clearIndicesCacheRequest, IndicesRequestConverters::clearCache, options,
             ClearIndicesCacheResponse::fromXContent, emptySet());
     }
 
@@ -543,7 +543,7 @@ public final class IndicesClient {
      */
     public void clearCacheAsync(ClearIndicesCacheRequest clearIndicesCacheRequest, RequestOptions options,
                                 ActionListener<ClearIndicesCacheResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(clearIndicesCacheRequest, RequestConverters::clearCache, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(clearIndicesCacheRequest, IndicesRequestConverters::clearCache, options,
             ClearIndicesCacheResponse::fromXContent, listener, emptySet());
     }
 
@@ -559,7 +559,7 @@ public final class IndicesClient {
     public boolean exists(GetIndexRequest request, RequestOptions options) throws IOException {
         return restHighLevelClient.performRequest(
             request,
-            RequestConverters::indicesExist,
+            IndicesRequestConverters::indicesExist,
             options,
             RestHighLevelClient::convertExistsResponse,
             Collections.emptySet()
@@ -577,7 +577,7 @@ public final class IndicesClient {
     public void existsAsync(GetIndexRequest request, RequestOptions options, ActionListener<Boolean> listener) {
         restHighLevelClient.performRequestAsync(
             request,
-            RequestConverters::indicesExist,
+            IndicesRequestConverters::indicesExist,
            options,
             RestHighLevelClient::convertExistsResponse,
             listener,
@@ -595,7 +595,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ResizeResponse shrink(ResizeRequest resizeRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, RequestConverters::shrink, options,
+        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, IndicesRequestConverters::shrink, options,
             ResizeResponse::fromXContent, emptySet());
     }
 
@@ -608,7 +608,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
    public void shrinkAsync(ResizeRequest resizeRequest, RequestOptions options, ActionListener<ResizeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, RequestConverters::shrink, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, IndicesRequestConverters::shrink, options,
             ResizeResponse::fromXContent, listener, emptySet());
     }
 
@@ -622,7 +622,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ResizeResponse split(ResizeRequest resizeRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, RequestConverters::split, options,
+        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, IndicesRequestConverters::split, options,
             ResizeResponse::fromXContent, emptySet());
     }
 
@@ -635,7 +635,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void splitAsync(ResizeRequest resizeRequest, RequestOptions options, ActionListener<ResizeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, RequestConverters::split, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, IndicesRequestConverters::split, options,
             ResizeResponse::fromXContent, listener, emptySet());
     }
 
@@ -649,7 +649,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public RolloverResponse rollover(RolloverRequest rolloverRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(rolloverRequest, RequestConverters::rollover, options,
+        return restHighLevelClient.performRequestAndParseEntity(rolloverRequest, IndicesRequestConverters::rollover, options,
             RolloverResponse::fromXContent, emptySet());
     }
 
@@ -662,7 +662,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void rolloverAsync(RolloverRequest rolloverRequest, RequestOptions options, ActionListener<RolloverResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(rolloverRequest, RequestConverters::rollover, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(rolloverRequest, IndicesRequestConverters::rollover, options,
             RolloverResponse::fromXContent, listener, emptySet());
     }
 
@@ -676,7 +676,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetAliasesResponse getAlias(GetAliasesRequest getAliasesRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getAliasesRequest, RequestConverters::getAlias, options,
+        return restHighLevelClient.performRequestAndParseEntity(getAliasesRequest, IndicesRequestConverters::getAlias, options,
             GetAliasesResponse::fromXContent, singleton(RestStatus.NOT_FOUND.getStatus()));
     }
 
@@ -689,7 +689,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void getAliasAsync(GetAliasesRequest getAliasesRequest, RequestOptions options, ActionListener<GetAliasesResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getAliasesRequest, RequestConverters::getAlias, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getAliasesRequest, IndicesRequestConverters::getAlias, options,
             GetAliasesResponse::fromXContent, listener, singleton(RestStatus.NOT_FOUND.getStatus()));
     }
 
@@ -703,7 +703,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse putSettings(UpdateSettingsRequest updateSettingsRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options,
+        return restHighLevelClient.performRequestAndParseEntity(updateSettingsRequest, IndicesRequestConverters::indexPutSettings, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -717,7 +717,7 @@ public final class IndicesClient {
      */
     public void putSettingsAsync(UpdateSettingsRequest updateSettingsRequest, RequestOptions options,
                                  ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(updateSettingsRequest, IndicesRequestConverters::indexPutSettings, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -732,7 +732,7 @@ public final class IndicesClient {
      */
     public AcknowledgedResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest,
                                             RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options,
+        return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, IndicesRequestConverters::putTemplate, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -746,7 +746,7 @@ public final class IndicesClient {
      */
     public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest, RequestOptions options,
                                  ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, IndicesRequestConverters::putTemplate, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -761,7 +761,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
+        return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, IndicesRequestConverters::validateQuery, options,
             ValidateQueryResponse::fromXContent, emptySet());
     }
 
@@ -776,7 +776,7 @@ public final class IndicesClient {
      */
     public void validateQueryAsync(ValidateQueryRequest validateQueryRequest, RequestOptions options,
                                    ActionListener<ValidateQueryResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, IndicesRequestConverters::validateQuery, options,
             ValidateQueryResponse::fromXContent, listener, emptySet());
     }
 
@@ -791,7 +791,7 @@ public final class IndicesClient {
      */
     public GetIndexTemplatesResponse getTemplate(GetIndexTemplatesRequest getIndexTemplatesRequest,
                                                  RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
+        return restHighLevelClient.performRequestAndParseEntity(getIndexTemplatesRequest, IndicesRequestConverters::getTemplates,
             options, GetIndexTemplatesResponse::fromXContent, emptySet());
     }
 
@@ -805,7 +805,7 @@ public final class IndicesClient {
      */
     public void getTemplateAsync(GetIndexTemplatesRequest getIndexTemplatesRequest, RequestOptions options,
                                  ActionListener<GetIndexTemplatesResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, IndicesRequestConverters::getTemplates,
             options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
     }
 
@@ -818,7 +818,7 @@ public final class IndicesClient {
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      */
     public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
+        return restHighLevelClient.performRequestAndParseEntity(request, IndicesRequestConverters::analyze, options,
             AnalyzeResponse::fromXContent, emptySet());
     }
 
@@ -833,7 +833,7 @@ public final class IndicesClient {
      */
     public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
                              ActionListener<AnalyzeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, IndicesRequestConverters::analyze, options,
             AnalyzeResponse::fromXContent, listener, emptySet());
     }
 }
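All of the IndicesClient hunks above are one mechanical change: the request-building method references move from the catch-all RequestConverters to the indices-specific IndicesRequestConverters class added below. The public client API is untouched, so caller code along these lines (a sketch; the client variable and index name are assumed, not from the commit) behaves identically before and after:

    import java.io.IOException;

    import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;

    public class DeleteIndexExample {
        // Sketch: deleting an index through the high-level REST client. The
        // RequestConverters -> IndicesRequestConverters move is internal to the
        // client, so this call path is unchanged by the refactoring.
        static AcknowledgedResponse deleteIndex(RestHighLevelClient client) throws IOException {
            DeleteIndexRequest request = new DeleteIndexRequest("my-index"); // hypothetical index name
            return client.indices().delete(request, RequestOptions.DEFAULT);
        }
    }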
@@ -0,0 +1,403 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
+import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.flush.FlushRequest;
+import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
+import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
+import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
+import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
+import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
+import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
+import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
+import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
+import org.elasticsearch.action.admin.indices.shrink.ResizeType;
+import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
+import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
+import org.elasticsearch.common.Strings;
+
+import java.io.IOException;
+import java.util.Locale;
+
+public class IndicesRequestConverters {
+    static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) {
+        String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices());
+        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(deleteIndexRequest.timeout());
+        parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
+        parameters.withIndicesOptions(deleteIndexRequest.indicesOptions());
+        return request;
+    }
+
+    static Request openIndex(OpenIndexRequest openIndexRequest) {
+        String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open");
+        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(openIndexRequest.timeout());
+        parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout());
+        parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards());
+        parameters.withIndicesOptions(openIndexRequest.indicesOptions());
+        return request;
+    }
+
+    static Request closeIndex(CloseIndexRequest closeIndexRequest) {
+        String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close");
+        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(closeIndexRequest.timeout());
+        parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout());
+        parameters.withIndicesOptions(closeIndexRequest.indicesOptions());
+        return request;
+    }
+
+    static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException {
+        String endpoint = RequestConverters.endpoint(createIndexRequest.indices());
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(createIndexRequest.timeout());
+        parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
+        parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
+
+        request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
+
+    static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException {
+        Request request = new Request(HttpPost.METHOD_NAME, "/_aliases");
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(indicesAliasesRequest.timeout());
+        parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout());
+
+        request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
+
+    static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
+        // The concreteIndex is an internal concept, not applicable to requests made over the REST API.
+        if (putMappingRequest.getConcreteIndex() != null) {
+            throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API");
+        }
+
+        Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping",
+            putMappingRequest.type()));
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(putMappingRequest.timeout());
+        parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
+
+        request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
+
+    static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException {
+        String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices();
+        String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types();
+
+        Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types));
+
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
+        parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
+        parameters.withLocal(getMappingsRequest.local());
+        return request;
+    }
+
+    static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException {
+        String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices();
+        String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types();
+        String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields();
+
+        String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices)
+            .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types)
|
||||||
|
.addPathPartAsIs("field").addCommaSeparatedPathParts(fields)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
|
||||||
|
parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
|
||||||
|
parameters.withLocal(getFieldMappingsRequest.local());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request refresh(RefreshRequest refreshRequest) {
|
||||||
|
String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh"));
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(refreshRequest.indicesOptions());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request flush(FlushRequest flushRequest) {
|
||||||
|
String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush"));
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(flushRequest.indicesOptions());
|
||||||
|
parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
|
||||||
|
parameters.putParam("force", Boolean.toString(flushRequest.force()));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request flushSynced(SyncedFlushRequest syncedFlushRequest) {
|
||||||
|
String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced"));
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(syncedFlushRequest.indicesOptions());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request forceMerge(ForceMergeRequest forceMergeRequest) {
|
||||||
|
String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge"));
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(forceMergeRequest.indicesOptions());
|
||||||
|
parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
|
||||||
|
parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
|
||||||
|
parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush()));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) {
|
||||||
|
String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear"));
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions());
|
||||||
|
parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
|
||||||
|
parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
|
||||||
|
parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
|
||||||
|
parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields()));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request existsAlias(GetAliasesRequest getAliasesRequest) {
|
||||||
|
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
|
||||||
|
(getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
|
||||||
|
throw new IllegalArgumentException("existsAlias requires at least an alias or an index");
|
||||||
|
}
|
||||||
|
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
|
||||||
|
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
|
||||||
|
|
||||||
|
Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases));
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withIndicesOptions(getAliasesRequest.indicesOptions());
|
||||||
|
params.withLocal(getAliasesRequest.local());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request split(ResizeRequest resizeRequest) throws IOException {
|
||||||
|
if (resizeRequest.getResizeType() != ResizeType.SPLIT) {
|
||||||
|
throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request");
|
||||||
|
}
|
||||||
|
return resize(resizeRequest);
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request shrink(ResizeRequest resizeRequest) throws IOException {
|
||||||
|
if (resizeRequest.getResizeType() != ResizeType.SHRINK) {
|
||||||
|
throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request");
|
||||||
|
}
|
||||||
|
return resize(resizeRequest);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Request resize(ResizeRequest resizeRequest) throws IOException {
|
||||||
|
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
|
||||||
|
.addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT))
|
||||||
|
.addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
|
||||||
|
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withTimeout(resizeRequest.timeout());
|
||||||
|
params.withMasterTimeout(resizeRequest.masterNodeTimeout());
|
||||||
|
params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards());
|
||||||
|
|
||||||
|
request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
|
||||||
|
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
|
||||||
|
.addPathPart(rolloverRequest.getNewIndexName()).build();
|
||||||
|
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withTimeout(rolloverRequest.timeout());
|
||||||
|
params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
|
||||||
|
params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
|
||||||
|
if (rolloverRequest.isDryRun()) {
|
||||||
|
params.putParam("dry_run", Boolean.TRUE.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request getSettings(GetSettingsRequest getSettingsRequest) {
|
||||||
|
String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices();
|
||||||
|
String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names();
|
||||||
|
|
||||||
|
String endpoint = RequestConverters.endpoint(indices, "_settings", names);
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withIndicesOptions(getSettingsRequest.indicesOptions());
|
||||||
|
params.withLocal(getSettingsRequest.local());
|
||||||
|
params.withIncludeDefaults(getSettingsRequest.includeDefaults());
|
||||||
|
params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
|
||||||
|
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request getIndex(GetIndexRequest getIndexRequest) {
|
||||||
|
String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices();
|
||||||
|
|
||||||
|
String endpoint = RequestConverters.endpoint(indices);
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withIndicesOptions(getIndexRequest.indicesOptions());
|
||||||
|
params.withLocal(getIndexRequest.local());
|
||||||
|
params.withIncludeDefaults(getIndexRequest.includeDefaults());
|
||||||
|
params.withHuman(getIndexRequest.humanReadable());
|
||||||
|
params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
|
||||||
|
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request indicesExist(GetIndexRequest getIndexRequest) {
|
||||||
|
// this can be called with no indices as argument by transport client, not via REST though
|
||||||
|
if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
|
||||||
|
throw new IllegalArgumentException("indices are mandatory");
|
||||||
|
}
|
||||||
|
String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
|
||||||
|
Request request = new Request(HttpHead.METHOD_NAME, endpoint);
|
||||||
|
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withLocal(getIndexRequest.local());
|
||||||
|
params.withHuman(getIndexRequest.humanReadable());
|
||||||
|
params.withIndicesOptions(getIndexRequest.indicesOptions());
|
||||||
|
params.withIncludeDefaults(getIndexRequest.includeDefaults());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException {
|
||||||
|
String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices();
|
||||||
|
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings"));
|
||||||
|
|
||||||
|
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||||
|
parameters.withTimeout(updateSettingsRequest.timeout());
|
||||||
|
parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout());
|
||||||
|
parameters.withIndicesOptions(updateSettingsRequest.indicesOptions());
|
||||||
|
parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting());
|
||||||
|
|
||||||
|
request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
|
||||||
|
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
|
||||||
|
.addPathPart(putIndexTemplateRequest.name()).build();
|
||||||
|
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
|
||||||
|
if (putIndexTemplateRequest.create()) {
|
||||||
|
params.putParam("create", Boolean.TRUE.toString());
|
||||||
|
}
|
||||||
|
if (Strings.hasText(putIndexTemplateRequest.cause())) {
|
||||||
|
params.putParam("cause", putIndexTemplateRequest.cause());
|
||||||
|
}
|
||||||
|
request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException {
|
||||||
|
String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices();
|
||||||
|
String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
|
||||||
|
String endpoint = RequestConverters.endpoint(indices, types, "_validate/query");
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withIndicesOptions(validateQueryRequest.indicesOptions());
|
||||||
|
params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
|
||||||
|
params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
|
||||||
|
params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
|
||||||
|
request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request getAlias(GetAliasesRequest getAliasesRequest) {
|
||||||
|
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
|
||||||
|
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
|
||||||
|
String endpoint = RequestConverters.endpoint(indices, "_alias", aliases);
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withIndicesOptions(getAliasesRequest.indicesOptions());
|
||||||
|
params.withLocal(getAliasesRequest.local());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) throws IOException {
|
||||||
|
String[] names = getIndexTemplatesRequest.names();
|
||||||
|
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addCommaSeparatedPathParts(names).build();
|
||||||
|
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||||
|
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||||
|
params.withLocal(getIndexTemplatesRequest.local());
|
||||||
|
params.withMasterTimeout(getIndexTemplatesRequest.masterNodeTimeout());
|
||||||
|
return request;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Request analyze(AnalyzeRequest request) throws IOException {
|
||||||
|
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder();
|
||||||
|
String index = request.index();
|
||||||
|
if (index != null) {
|
||||||
|
builder.addPathPart(index);
|
||||||
|
}
|
||||||
|
builder.addPathPartAsIs("_analyze");
|
||||||
|
Request req = new Request(HttpGet.METHOD_NAME, builder.build());
|
||||||
|
req.setEntity(RequestConverters.createEntity(request, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||||
|
return req;
|
||||||
|
}
|
||||||
|
}
|
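Every converter above follows the same shape: derive an endpoint path from the request's indices plus a fixed suffix, pick the matching HTTP verb, then copy optional settings (timeouts, indices options, boolean flags) onto the query string as parameters. A minimal, self-contained sketch of the path-building step follows; joinEndpoint is a hypothetical stand-in for RequestConverters.endpoint, whose body is not part of this diff.

import java.util.StringJoiner;

// Sketch only: joinEndpoint mimics the comma-separated index list followed by
// fixed path parts, e.g. "/logs-1,logs-2/_open". It is not the real helper.
class EndpointSketch {
    static String joinEndpoint(String[] indices, String... suffixParts) {
        StringJoiner path = new StringJoiner("/", "/", "");
        if (indices.length > 0) {
            path.add(String.join(",", indices)); // all indices become one path segment
        }
        for (String part : suffixParts) {
            path.add(part);
        }
        return path.toString();
    }

    public static void main(String[] args) {
        System.out.println("DELETE " + joinEndpoint(new String[]{"logs-1", "logs-2"}));        // shape of deleteIndex
        System.out.println("POST " + joinEndpoint(new String[]{"logs-1"}, "_open"));           // shape of openIndex
        System.out.println("POST " + joinEndpoint(new String[]{"logs-1"}, "_cache", "clear")); // roughly, clearCache
    }
}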
@ -34,6 +34,7 @@ import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.GetBucketsRequest;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetInfluencersRequest;
@ -229,7 +230,7 @@ final class MLRequestConverters {
         return request;
     }
 
-    static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) throws IOException {
+    static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) {
         String endpoint = new EndpointBuilder()
                 .addPathPartAsIs("_xpack")
                 .addPathPartAsIs("ml")
@ -305,7 +306,7 @@ final class MLRequestConverters {
         return request;
     }
 
-    static Request postData(PostDataRequest postDataRequest) throws IOException {
+    static Request postData(PostDataRequest postDataRequest) {
         String endpoint = new EndpointBuilder()
                 .addPathPartAsIs("_xpack")
                 .addPathPartAsIs("ml")
@ -359,4 +360,16 @@ final class MLRequestConverters {
         request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
+
+    static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException {
+        String endpoint = new EndpointBuilder()
+                .addPathPartAsIs("_xpack")
+                .addPathPartAsIs("ml")
+                .addPathPartAsIs("calendars")
+                .addPathPart(getCalendarsRequest.getCalendarId())
+                .build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        request.setEntity(createEntity(getCalendarsRequest, REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
 }
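The EndpointBuilder chain above resolves to /_xpack/ml/calendars/{calendarId}, with the trailing part dropped when no id is set, which is what lets one converter serve both "one calendar" and "all calendars". A hedged sketch of that skip-empty-parts behaviour follows; buildPath is a hypothetical stand-in, since EndpointBuilder itself is defined elsewhere.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for EndpointBuilder: null or empty path parts are
// skipped, the remaining parts are joined with '/'.
class MlPathSketch {
    static String buildPath(String... parts) {
        List<String> kept = new ArrayList<>();
        for (String part : parts) {
            if (part != null && part.isEmpty() == false) {
                kept.add(part);
            }
        }
        return "/" + String.join("/", kept);
    }

    public static void main(String[] args) {
        System.out.println(buildPath("_xpack", "ml", "calendars", "my_calendar")); // /_xpack/ml/calendars/my_calendar
        System.out.println(buildPath("_xpack", "ml", "calendars", null));          // /_xpack/ml/calendars (all calendars)
    }
}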
@ -31,6 +31,8 @@ import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.ForecastJobResponse;
 import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
+import org.elasticsearch.client.ml.GetCalendarsResponse;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetCategoriesResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
@ -792,6 +794,44 @@ public final class MachineLearningClient {
             Collections.emptySet());
     }
 
+    /**
+     * Gets a single or multiple calendars.
+     * <p>
+     * For additional info
+     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar.html">ML GET calendars documentation</a>
+     *
+     * @param request The calendars request
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return {@link GetCalendarsResponse} response object containing the {@link org.elasticsearch.client.ml.calendars.Calendar}
+     * objects and the number of calendars found
+     */
+    public GetCalendarsResponse getCalendars(GetCalendarsRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+                MLRequestConverters::getCalendars,
+                options,
+                GetCalendarsResponse::fromXContent,
+                Collections.emptySet());
+    }
+
+    /**
+     * Gets a single or multiple calendars, notifies listener once the requested records are retrieved.
+     * <p>
+     * For additional info
+     * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar.html">ML GET calendars documentation</a>
+     *
+     * @param request The calendars request
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener Listener to be notified upon request completion
+     */
+    public void getCalendarsAsync(GetCalendarsRequest request, RequestOptions options, ActionListener<GetCalendarsResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+                MLRequestConverters::getCalendars,
+                options,
+                GetCalendarsResponse::fromXContent,
+                listener,
+                Collections.emptySet());
+    }
+
     /**
      * Gets the influencers for a Machine Learning Job.
      * <p>
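A usage sketch for the new client call, assuming an already-constructed RestHighLevelClient; everything here uses only the API added in this change plus the standard machineLearning() entry point and RequestOptions.DEFAULT.

import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.calendars.Calendar;

class GetCalendarsUsage {
    // Leaving the calendar id unset asks for every calendar.
    static List<Calendar> fetchAllCalendars(RestHighLevelClient client) throws IOException {
        GetCalendarsRequest request = new GetCalendarsRequest();
        GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
        return response.calendars();
    }
}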
@ -33,30 +33,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
-import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
-import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
-import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
-import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
-import org.elasticsearch.action.admin.indices.flush.FlushRequest;
-import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
-import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
-import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
-import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
-import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
-import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
-import org.elasticsearch.action.admin.indices.shrink.ResizeType;
-import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
-import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
-import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.explain.ExplainRequest;
@ -138,165 +115,6 @@ final class RequestConverters {
         return request;
     }
 
-    static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) {
-        String endpoint = endpoint(deleteIndexRequest.indices());
-        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(deleteIndexRequest.timeout());
-        parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
-        parameters.withIndicesOptions(deleteIndexRequest.indicesOptions());
-        return request;
-    }
-
-    static Request openIndex(OpenIndexRequest openIndexRequest) {
-        String endpoint = endpoint(openIndexRequest.indices(), "_open");
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(openIndexRequest.timeout());
-        parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout());
-        parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards());
-        parameters.withIndicesOptions(openIndexRequest.indicesOptions());
-        return request;
-    }
-
-    static Request closeIndex(CloseIndexRequest closeIndexRequest) {
-        String endpoint = endpoint(closeIndexRequest.indices(), "_close");
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(closeIndexRequest.timeout());
-        parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout());
-        parameters.withIndicesOptions(closeIndexRequest.indicesOptions());
-        return request;
-    }
-
-    static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException {
-        String endpoint = endpoint(createIndexRequest.indices());
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(createIndexRequest.timeout());
-        parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
-        parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
-
-        request.setEntity(createEntity(createIndexRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException {
-        Request request = new Request(HttpPost.METHOD_NAME, "/_aliases");
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(indicesAliasesRequest.timeout());
-        parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout());
-
-        request.setEntity(createEntity(indicesAliasesRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
-        // The concreteIndex is an internal concept, not applicable to requests made over the REST API.
-        if (putMappingRequest.getConcreteIndex() != null) {
-            throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API");
-        }
-
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type()));
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(putMappingRequest.timeout());
-        parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
-
-        request.setEntity(createEntity(putMappingRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException {
-        String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices();
-        String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types();
-
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint(indices, "_mapping", types));
-
-        Params parameters = new Params(request);
-        parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
-        parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
-        parameters.withLocal(getMappingsRequest.local());
-        return request;
-    }
-
-    static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException {
-        String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices();
-        String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types();
-        String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields();
-
-        String endpoint = new EndpointBuilder().addCommaSeparatedPathParts(indices)
-            .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types)
-            .addPathPartAsIs("field").addCommaSeparatedPathParts(fields)
-            .build();
-
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
-        parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
-        parameters.withLocal(getFieldMappingsRequest.local());
-        return request;
-    }
-
-    static Request refresh(RefreshRequest refreshRequest) {
-        String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh"));
-
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(refreshRequest.indicesOptions());
-        return request;
-    }
-
-    static Request flush(FlushRequest flushRequest) {
-        String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_flush"));
-
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(flushRequest.indicesOptions());
-        parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
-        parameters.putParam("force", Boolean.toString(flushRequest.force()));
-        return request;
-    }
-
-    static Request flushSynced(SyncedFlushRequest syncedFlushRequest) {
-        String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_flush/synced"));
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(syncedFlushRequest.indicesOptions());
-        return request;
-    }
-
-    static Request forceMerge(ForceMergeRequest forceMergeRequest) {
-        String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_forcemerge"));
-
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(forceMergeRequest.indicesOptions());
-        parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
-        parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
-        parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush()));
-        return request;
-    }
-
-    static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) {
-        String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY : clearIndicesCacheRequest.indices();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_cache/clear"));
-
-        Params parameters = new Params(request);
-        parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions());
-        parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
-        parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
-        parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
-        parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields()));
-        return request;
-    }
-
     static Request info() {
         return new Request(HttpGet.METHOD_NAME, "/");
     }
@ -617,22 +435,6 @@ final class RequestConverters {
         return request;
     }
 
-    static Request existsAlias(GetAliasesRequest getAliasesRequest) {
-        if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
-                (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
-            throw new IllegalArgumentException("existsAlias requires at least an alias or an index");
-        }
-        String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
-        String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
-
-        Request request = new Request(HttpHead.METHOD_NAME, endpoint(indices, "_alias", aliases));
-
-        Params params = new Params(request);
-        params.withIndicesOptions(getAliasesRequest.indicesOptions());
-        params.withLocal(getAliasesRequest.local());
-        return request;
-    }
-
     static Request explain(ExplainRequest explainRequest) throws IOException {
         Request request = new Request(HttpGet.METHOD_NAME,
             endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"));
@ -665,35 +467,6 @@ final class RequestConverters {
         return request;
     }
 
-    static Request split(ResizeRequest resizeRequest) throws IOException {
-        if (resizeRequest.getResizeType() != ResizeType.SPLIT) {
-            throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request");
-        }
-        return resize(resizeRequest);
-    }
-
-    static Request shrink(ResizeRequest resizeRequest) throws IOException {
-        if (resizeRequest.getResizeType() != ResizeType.SHRINK) {
-            throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request");
-        }
-        return resize(resizeRequest);
-    }
-
-    private static Request resize(ResizeRequest resizeRequest) throws IOException {
-        String endpoint = new EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
-            .addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT))
-            .addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-
-        Params params = new Params(request);
-        params.withTimeout(resizeRequest.timeout());
-        params.withMasterTimeout(resizeRequest.masterNodeTimeout());
-        params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards());
-
-        request.setEntity(createEntity(resizeRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
     static Request reindex(ReindexRequest reindexRequest) throws IOException {
         String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@ -762,135 +535,6 @@ final class RequestConverters {
         return request;
     }
 
-    static Request rollover(RolloverRequest rolloverRequest) throws IOException {
-        String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
-            .addPathPart(rolloverRequest.getNewIndexName()).build();
-        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-
-        Params params = new Params(request);
-        params.withTimeout(rolloverRequest.timeout());
-        params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
-        params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
-        if (rolloverRequest.isDryRun()) {
-            params.putParam("dry_run", Boolean.TRUE.toString());
-        }
-
-        request.setEntity(createEntity(rolloverRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request getSettings(GetSettingsRequest getSettingsRequest) {
-        String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices();
-        String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names();
-
-        String endpoint = endpoint(indices, "_settings", names);
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-
-        Params params = new Params(request);
-        params.withIndicesOptions(getSettingsRequest.indicesOptions());
-        params.withLocal(getSettingsRequest.local());
-        params.withIncludeDefaults(getSettingsRequest.includeDefaults());
-        params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
-
-        return request;
-    }
-
-    static Request getIndex(GetIndexRequest getIndexRequest) {
-        String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices();
-
-        String endpoint = endpoint(indices);
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-
-        Params params = new Params(request);
-        params.withIndicesOptions(getIndexRequest.indicesOptions());
-        params.withLocal(getIndexRequest.local());
-        params.withIncludeDefaults(getIndexRequest.includeDefaults());
-        params.withHuman(getIndexRequest.humanReadable());
-        params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
-
-        return request;
-    }
-
-    static Request indicesExist(GetIndexRequest getIndexRequest) {
-        // this can be called with no indices as argument by transport client, not via REST though
-        if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
-            throw new IllegalArgumentException("indices are mandatory");
-        }
-        String endpoint = endpoint(getIndexRequest.indices(), "");
-        Request request = new Request(HttpHead.METHOD_NAME, endpoint);
-
-        Params params = new Params(request);
-        params.withLocal(getIndexRequest.local());
-        params.withHuman(getIndexRequest.humanReadable());
-        params.withIndicesOptions(getIndexRequest.indicesOptions());
-        params.withIncludeDefaults(getIndexRequest.includeDefaults());
-        return request;
-    }
-
-    static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException {
-        String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices();
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint(indices, "_settings"));
-
-        Params parameters = new Params(request);
-        parameters.withTimeout(updateSettingsRequest.timeout());
-        parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout());
-        parameters.withIndicesOptions(updateSettingsRequest.indicesOptions());
-        parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting());
-
-        request.setEntity(createEntity(updateSettingsRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
-        String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        Params params = new Params(request);
-        params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
-        if (putIndexTemplateRequest.create()) {
-            params.putParam("create", Boolean.TRUE.toString());
-        }
-        if (Strings.hasText(putIndexTemplateRequest.cause())) {
-            params.putParam("cause", putIndexTemplateRequest.cause());
-        }
-        request.setEntity(createEntity(putIndexTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException {
-        String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices();
-        String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
-        String endpoint = endpoint(indices, types, "_validate/query");
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        Params params = new Params(request);
-        params.withIndicesOptions(validateQueryRequest.indicesOptions());
-        params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
-        params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
-        params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
-        request.setEntity(createEntity(validateQueryRequest, REQUEST_BODY_CONTENT_TYPE));
-        return request;
-    }
-
-    static Request getAlias(GetAliasesRequest getAliasesRequest) {
-        String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
-        String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
-        String endpoint = endpoint(indices, "_alias", aliases);
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        Params params = new Params(request);
-        params.withIndicesOptions(getAliasesRequest.indicesOptions());
-        params.withLocal(getAliasesRequest.local());
-        return request;
-    }
-
-    static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) throws IOException {
-        String[] names = getIndexTemplatesRequest.names();
-        String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addCommaSeparatedPathParts(names).build();
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        Params params = new Params(request);
-        params.withLocal(getIndexTemplatesRequest.local());
-        params.withMasterTimeout(getIndexTemplatesRequest.masterNodeTimeout());
-        return request;
-    }
-
     static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException {
         String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@ -0,0 +1,104 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import org.elasticsearch.client.ml.job.util.PageParams;

import java.io.IOException;
import java.util.Objects;

public class GetCalendarsRequest extends ActionRequest implements ToXContentObject {

    public static final ObjectParser<GetCalendarsRequest, Void> PARSER =
            new ObjectParser<>("get_calendars_request", GetCalendarsRequest::new);

    static {
        PARSER.declareString(GetCalendarsRequest::setCalendarId, Calendar.ID);
        PARSER.declareObject(GetCalendarsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE);
    }

    private String calendarId;
    private PageParams pageParams;

    public GetCalendarsRequest() {
    }

    public GetCalendarsRequest(String calendarId) {
        this.calendarId = calendarId;
    }

    public String getCalendarId() {
        return calendarId;
    }

    public void setCalendarId(String calendarId) {
        this.calendarId = calendarId;
    }

    public PageParams getPageParams() {
        return pageParams;
    }

    public void setPageParams(PageParams pageParams) {
        this.pageParams = pageParams;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (calendarId != null) {
            builder.field(Calendar.ID.getPreferredName(), calendarId);
        }
        if (pageParams != null) {
            builder.field(PageParams.PAGE.getPreferredName(), pageParams);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(calendarId, pageParams);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        GetCalendarsRequest other = (GetCalendarsRequest) obj;
        return Objects.equals(calendarId, other.calendarId) && Objects.equals(pageParams, other.pageParams);
    }
}
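Serialised through toXContent above, the request body carries at most two fields. A short sketch follows; it assumes PageParams takes (from, size) and that Calendar.ID and PageParams.PAGE use the preferred names "calendar_id" and "page", neither of which is spelled out in this diff.

import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.job.util.PageParams;

class RequestBodySketch {
    static GetCalendarsRequest example() {
        GetCalendarsRequest request = new GetCalendarsRequest("my_calendar");
        request.setPageParams(new PageParams(0, 10)); // assumption: PageParams(from, size)
        // toXContent then emits, roughly:
        // {"calendar_id":"my_calendar","page":{"from":0,"size":10}}
        return request;
    }
}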
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.calendars.Calendar;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+public class GetCalendarsResponse extends AbstractResultResponse<Calendar> {
+
+    public static final ParseField RESULTS_FIELD = new ParseField("calendars");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetCalendarsResponse, Void> PARSER =
+            new ConstructingObjectParser<>("calendars_response", true,
+                    a -> new GetCalendarsResponse((List<Calendar>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), Calendar.PARSER, RESULTS_FIELD);
+        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
+    }
+
+    public static GetCalendarsResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    GetCalendarsResponse(List<Calendar> calendars, long count) {
+        super(RESULTS_FIELD, calendars, count);
+    }
+
+    /**
+     * The collection of {@link Calendar} objects found in the query
+     */
+    public List<Calendar> calendars() {
+        return results;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(results, count);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        GetCalendarsResponse other = (GetCalendarsResponse) obj;
+        return Objects.equals(results, other.results) && count == other.count;
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
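For orientation, a minimal sketch of how the new response class would be consumed, assuming the standard XContent plumbing (JsonXContent, NamedXContentRegistry.EMPTY and DeprecationHandler.THROW_UNSUPPORTED_OPERATION are existing Elasticsearch utilities) and assuming AbstractResultResponse.COUNT maps to a "count" field; the JSON payload and the "holidays" calendar are illustrative only:

    // Illustrative payload; "calendars" follows RESULTS_FIELD declared above.
    String json = "{\"count\": 1, \"calendars\": [{\"calendar_id\": \"holidays\", \"job_ids\": [\"my-job\"]}]}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        GetCalendarsResponse response = GetCalendarsResponse.fromXContent(parser);
        List<Calendar> calendars = response.calendars(); // the single "holidays" calendar
    }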
@@ -66,7 +66,6 @@ public class Job implements ToXContentObject {
     public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days");
     public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id");
     public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name");
-    public static final ParseField DELETED = new ParseField("deleted");

     public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", true, Builder::new);

@@ -100,7 +99,6 @@ public class Job implements ToXContentObject {
         PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
         PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
         PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
-        PARSER.declareBoolean(Builder::setDeleted, DELETED);
     }

     private final String jobId;
@@ -123,14 +121,13 @@ public class Job implements ToXContentObject {
     private final Map<String, Object> customSettings;
     private final String modelSnapshotId;
     private final String resultsIndexName;
-    private final boolean deleted;

     private Job(String jobId, String jobType, List<String> groups, String description, Date createTime,
                 Date finishedTime, Date lastDataTime, Long establishedModelMemory,
                 AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
                 ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
                 Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
-                String modelSnapshotId, String resultsIndexName, boolean deleted) {
+                String modelSnapshotId, String resultsIndexName) {

         this.jobId = jobId;
         this.jobType = jobType;
@@ -151,7 +148,6 @@ public class Job implements ToXContentObject {
         this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings);
         this.modelSnapshotId = modelSnapshotId;
         this.resultsIndexName = resultsIndexName;
-        this.deleted = deleted;
     }

     /**
@@ -296,10 +292,6 @@ public class Job implements ToXContentObject {
         return modelSnapshotId;
     }

-    public boolean isDeleted() {
-        return deleted;
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
@@ -359,9 +351,6 @@ public class Job implements ToXContentObject {
         if (resultsIndexName != null) {
             builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName);
         }
-        if (params.paramAsBoolean("all", false)) {
-            builder.field(DELETED.getPreferredName(), deleted);
-        }
         builder.endObject();
         return builder;
     }
@@ -395,8 +384,7 @@ public class Job implements ToXContentObject {
                 && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays)
                 && Objects.equals(this.customSettings, that.customSettings)
                 && Objects.equals(this.modelSnapshotId, that.modelSnapshotId)
-                && Objects.equals(this.resultsIndexName, that.resultsIndexName)
-                && Objects.equals(this.deleted, that.deleted);
+                && Objects.equals(this.resultsIndexName, that.resultsIndexName);
     }

     @Override
@@ -404,7 +392,7 @@ public class Job implements ToXContentObject {
         return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
                 analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                 backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
-                modelSnapshotId, resultsIndexName, deleted);
+                modelSnapshotId, resultsIndexName);
     }

     @Override
@@ -437,7 +425,6 @@ public class Job implements ToXContentObject {
         private Map<String, Object> customSettings;
         private String modelSnapshotId;
         private String resultsIndexName;
-        private boolean deleted;

         private Builder() {
         }
@@ -466,7 +453,6 @@ public class Job implements ToXContentObject {
             this.customSettings = job.getCustomSettings();
             this.modelSnapshotId = job.getModelSnapshotId();
             this.resultsIndexName = job.getResultsIndexNameNoPrefix();
-            this.deleted = job.isDeleted();
         }

         public Builder setId(String id) {
@@ -573,11 +559,6 @@ public class Job implements ToXContentObject {
             return this;
         }

-        public Builder setDeleted(boolean deleted) {
-            this.deleted = deleted;
-            return this;
-        }
-
         /**
          * Builds a job.
          *
@@ -590,7 +571,7 @@ public class Job implements ToXContentObject {
                     id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory,
                     analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
                     backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
-                    modelSnapshotId, resultsIndexName, deleted);
+                    modelSnapshotId, resultsIndexName);
         }
     }
 }
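Since the client-side parser above is declared lenient (the `true` flag in `new ObjectParser<>("job_details", true, Builder::new)`), server responses that still carry the now-removed `deleted` field keep parsing after this change; the unknown field is simply skipped. A minimal sketch of that behaviour, assuming the usual `PARSER.apply(parser, null)` plumbing and the same XContent helpers as in the sketch above:

    // "deleted" is no longer declared on the parser; a lenient parse just ignores it.
    String json = "{\"job_id\": \"my-job\", \"deleted\": false}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        Job.Builder builder = Job.PARSER.apply(parser, null); // succeeds; no setDeleted/isDeleted anymore
    }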
@@ -19,16 +19,14 @@
 package org.elasticsearch.client.ml.job.results;

 import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser.Token;

 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.Collections;
 import java.util.Date;
 import java.util.List;
@@ -90,15 +88,9 @@ public class AnomalyRecord implements ToXContentObject {

     static {
         PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == Token.VALUE_NUMBER) {
-                return new Date(p.longValue());
-            } else if (p.currentToken() == Token.VALUE_STRING) {
-                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
-            }
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
-                    + Result.TIMESTAMP.getPreferredName() + "]");
-        }, Result.TIMESTAMP, ValueType.VALUE);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()),
+                Result.TIMESTAMP, ValueType.VALUE);
         PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
         PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
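The TimeUtil.parseTimeField helper that this and the following four parsers now delegate to is not itself part of this diff. Reconstructed from the inline logic it replaces, it would plausibly look like the sketch below; the class sits under org.elasticsearch.client.ml.job.util per the new import, but the exact signature is an assumption:

    // Hypothetical reconstruction of TimeUtil.parseTimeField, mirroring the removed lambda:
    // epoch-millis numbers and ISO-8601 instant strings both become java.util.Date.
    public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
            return new Date(parser.longValue());
        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
            return new Date(DateFormatters.toZonedDateTime(
                    DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli());
        }
        throw new IllegalArgumentException(
                "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
    }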
@@ -19,16 +19,14 @@
 package org.elasticsearch.client.ml.job.results;

 import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser.Token;

 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
@@ -63,15 +61,8 @@ public class Bucket implements ToXContentObject {

     static {
         PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == Token.VALUE_NUMBER) {
-                return new Date(p.longValue());
-            } else if (p.currentToken() == Token.VALUE_STRING) {
-                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
-            }
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
-                    + Result.TIMESTAMP.getPreferredName() + "]");
-        }, Result.TIMESTAMP, ValueType.VALUE);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE);
         PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE);
         PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE);
@@ -19,16 +19,14 @@
 package org.elasticsearch.client.ml.job.results;

 import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser.Token;

 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.Date;
 import java.util.Objects;

@@ -56,15 +54,9 @@ public class BucketInfluencer implements ToXContentObject {

     static {
         PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == Token.VALUE_NUMBER) {
-                return new Date(p.longValue());
-            } else if (p.currentToken() == Token.VALUE_STRING) {
-                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
-            }
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
-                    + Result.TIMESTAMP.getPreferredName() + "]");
-        }, Result.TIMESTAMP, ValueType.VALUE);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()),
+                Result.TIMESTAMP, ValueType.VALUE);
         PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE);
         PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME);
@@ -19,16 +19,14 @@
 package org.elasticsearch.client.ml.job.results;

 import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser.Token;

 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.Date;
 import java.util.Objects;

@@ -61,15 +59,9 @@ public class Influencer implements ToXContentObject {
         PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
         PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == Token.VALUE_NUMBER) {
-                return new Date(p.longValue());
-            } else if (p.currentToken() == Token.VALUE_STRING) {
-                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
-            }
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
-                    + Result.TIMESTAMP.getPreferredName() + "]");
-        }, Result.TIMESTAMP, ValueType.VALUE);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()),
+                Result.TIMESTAMP, ValueType.VALUE);
         PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE);
         PARSER.declareDouble(Influencer::setProbability, PROBABILITY);
@@ -19,16 +19,14 @@
 package org.elasticsearch.client.ml.job.results;

 import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.util.TimeUtil;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.Collections;
 import java.util.Date;
 import java.util.List;
@@ -56,15 +54,9 @@ public class OverallBucket implements ToXContentObject {
             a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3]));

     static {
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
-                return new Date(p.longValue());
-            } else if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli());
-            }
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for ["
-                    + Result.TIMESTAMP.getPreferredName() + "]");
-        }, Result.TIMESTAMP, ObjectParser.ValueType.VALUE);
+        PARSER.declareField(ConstructingObjectParser.constructorArg(),
+                (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()),
+                Result.TIMESTAMP, ObjectParser.ValueType.VALUE);
         PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
         PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE);
         PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM);
@@ -28,7 +28,6 @@ public final class Result {
     /**
     * Serialisation fields
     */
-    public static final ParseField TYPE = new ParseField("result");
    public static final ParseField RESULT_TYPE = new ParseField("result_type");
    public static final ParseField TIMESTAMP = new ParseField("timestamp");
    public static final ParseField IS_INTERIM = new ParseField("is_interim");
@ -0,0 +1,893 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.client;
|
||||||
|
|
||||||
|
import org.apache.http.client.methods.HttpDelete;
|
||||||
|
import org.apache.http.client.methods.HttpGet;
|
||||||
|
import org.apache.http.client.methods.HttpHead;
|
||||||
|
import org.apache.http.client.methods.HttpPost;
|
||||||
|
import org.apache.http.client.methods.HttpPut;
|
||||||
|
import org.apache.lucene.util.LuceneTestCase;
|
||||||
|
import org.elasticsearch.action.ActionRequestValidationException;
|
||||||
|
import org.elasticsearch.action.admin.indices.alias.Alias;
|
||||||
|
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
|
||||||
|
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
|
||||||
|
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
|
||||||
|
import org.elasticsearch.action.support.master.AcknowledgedRequest;
|
||||||
|
import org.elasticsearch.common.CheckedFunction;
|
||||||
|
import org.elasticsearch.common.Strings;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
|
import org.elasticsearch.common.util.CollectionUtils;
|
||||||
|
import org.elasticsearch.index.RandomCreateIndexGenerator;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
import org.junit.Assert;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Locale;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.StringJoiner;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import static org.elasticsearch.index.RandomCreateIndexGenerator.randomAliases;
|
||||||
|
import static org.elasticsearch.index.RandomCreateIndexGenerator.randomCreateIndexRequest;
|
||||||
|
import static org.elasticsearch.index.RandomCreateIndexGenerator.randomIndexSettings;
|
||||||
|
import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAliasAction;
|
||||||
|
import static org.hamcrest.CoreMatchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.nullValue;
|
||||||
|
|
||||||
|
public class IndicesRequestConvertersTests extends ESTestCase {
|
||||||
|
|
||||||
|
public void testAnalyzeRequest() throws Exception {
|
||||||
|
AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
|
||||||
|
.text("Here is some text")
|
||||||
|
.index("test_index")
|
||||||
|
.analyzer("test_analyzer");
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest);
|
||||||
|
assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
|
||||||
|
RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity());
|
||||||
|
|
||||||
|
AnalyzeRequest analyzeRequest = new AnalyzeRequest()
|
||||||
|
.text("more text")
|
||||||
|
.analyzer("test_analyzer");
|
||||||
|
assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testIndicesExist() {
|
||||||
|
String[] indices = RequestConvertersTests.randomIndicesNames(1, 10);
|
||||||
|
|
||||||
|
GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indices);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomLocal(getIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomHumanReadable(getIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomIncludeDefaults(getIndexRequest, expectedParams);
|
||||||
|
|
||||||
|
final Request request = IndicesRequestConverters.indicesExist(getIndexRequest);
|
||||||
|
|
||||||
|
Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod());
|
||||||
|
Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint());
|
||||||
|
Assert.assertThat(expectedParams, equalTo(request.getParameters()));
|
||||||
|
Assert.assertNull(request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testIndicesExistEmptyIndices() {
|
||||||
|
LuceneTestCase.expectThrows(IllegalArgumentException.class, ()
|
||||||
|
-> IndicesRequestConverters.indicesExist(new GetIndexRequest()));
|
||||||
|
LuceneTestCase.expectThrows(IllegalArgumentException.class, ()
|
||||||
|
-> IndicesRequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null)));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testCreateIndex() throws IOException {
|
||||||
|
CreateIndexRequest createIndexRequest = randomCreateIndexRequest();
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.createIndex(createIndexRequest);
|
||||||
|
Assert.assertEquals("/" + createIndexRequest.index(), request.getEndpoint());
|
||||||
|
Assert.assertEquals(expectedParams, request.getParameters());
|
||||||
|
Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
||||||
|
RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testCreateIndexNullIndex() {
|
||||||
|
ActionRequestValidationException validationException = new CreateIndexRequest(null).validate();
|
||||||
|
Assert.assertNotNull(validationException);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testUpdateAliases() throws IOException {
|
||||||
|
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
|
||||||
|
IndicesAliasesRequest.AliasActions aliasAction = randomAliasAction();
|
||||||
|
indicesAliasesRequest.addAliasAction(aliasAction);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomTimeout(indicesAliasesRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(indicesAliasesRequest, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.updateAliases(indicesAliasesRequest);
|
||||||
|
Assert.assertEquals("/_aliases", request.getEndpoint());
|
||||||
|
Assert.assertEquals(expectedParams, request.getParameters());
|
||||||
|
RequestConvertersTests.assertToXContentBody(indicesAliasesRequest, request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testPutMapping() throws IOException {
|
||||||
|
PutMappingRequest putMappingRequest = new PutMappingRequest();
|
||||||
|
|
||||||
|
String[] indices = RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
putMappingRequest.indices(indices);
|
||||||
|
|
||||||
|
String type = ESTestCase.randomAlphaOfLengthBetween(3, 10);
|
||||||
|
putMappingRequest.type(type);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
|
||||||
|
RequestConvertersTests.setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.putMapping(putMappingRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
String index = String.join(",", indices);
|
||||||
|
if (Strings.hasLength(index)) {
|
||||||
|
endpoint.add(index);
|
||||||
|
}
|
||||||
|
endpoint.add("_mapping");
|
||||||
|
endpoint.add(type);
|
||||||
|
Assert.assertEquals(endpoint.toString(), request.getEndpoint());
|
||||||
|
|
||||||
|
Assert.assertEquals(expectedParams, request.getParameters());
|
||||||
|
Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
||||||
|
RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetMapping() throws IOException {
|
||||||
|
GetMappingsRequest getMappingRequest = new GetMappingsRequest();
|
||||||
|
|
||||||
|
String[] indices = Strings.EMPTY_ARRAY;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
indices = RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
getMappingRequest.indices(indices);
|
||||||
|
} else if (ESTestCase.randomBoolean()) {
|
||||||
|
getMappingRequest.indices((String[]) null);
|
||||||
|
}
|
||||||
|
|
||||||
|
String type = null;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
type = ESTestCase.randomAlphaOfLengthBetween(3, 10);
|
||||||
|
getMappingRequest.types(type);
|
||||||
|
} else if (ESTestCase.randomBoolean()) {
|
||||||
|
getMappingRequest.types((String[]) null);
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(getMappingRequest::indicesOptions,
|
||||||
|
getMappingRequest::indicesOptions, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomLocal(getMappingRequest, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.getMappings(getMappingRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
String index = String.join(",", indices);
|
||||||
|
if (Strings.hasLength(index)) {
|
||||||
|
endpoint.add(index);
|
||||||
|
}
|
||||||
|
endpoint.add("_mapping");
|
||||||
|
if (type != null) {
|
||||||
|
endpoint.add(type);
|
||||||
|
}
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
|
||||||
|
Assert.assertThat(expectedParams, equalTo(request.getParameters()));
|
||||||
|
Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetFieldMapping() throws IOException {
|
||||||
|
GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest();
|
||||||
|
|
||||||
|
String[] indices = Strings.EMPTY_ARRAY;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
indices = RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
getFieldMappingsRequest.indices(indices);
|
||||||
|
} else if (ESTestCase.randomBoolean()) {
|
||||||
|
getFieldMappingsRequest.indices((String[]) null);
|
||||||
|
}
|
||||||
|
|
||||||
|
String type = null;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
type = ESTestCase.randomAlphaOfLengthBetween(3, 10);
|
||||||
|
getFieldMappingsRequest.types(type);
|
||||||
|
} else if (ESTestCase.randomBoolean()) {
|
||||||
|
getFieldMappingsRequest.types((String[]) null);
|
||||||
|
}
|
||||||
|
|
||||||
|
String[] fields = null;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
fields = new String[ESTestCase.randomIntBetween(1, 5)];
|
||||||
|
for (int i = 0; i < fields.length; i++) {
|
||||||
|
fields[i] = ESTestCase.randomAlphaOfLengthBetween(3, 10);
|
||||||
|
}
|
||||||
|
getFieldMappingsRequest.fields(fields);
|
||||||
|
} else if (ESTestCase.randomBoolean()) {
|
||||||
|
getFieldMappingsRequest.fields((String[]) null);
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
RequestConvertersTests.setRandomLocal(getFieldMappingsRequest::local, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.getFieldMapping(getFieldMappingsRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
String index = String.join(",", indices);
|
||||||
|
if (Strings.hasLength(index)) {
|
||||||
|
endpoint.add(index);
|
||||||
|
}
|
||||||
|
endpoint.add("_mapping");
|
||||||
|
if (type != null) {
|
||||||
|
endpoint.add(type);
|
||||||
|
}
|
||||||
|
endpoint.add("field");
|
||||||
|
if (fields != null) {
|
||||||
|
endpoint.add(String.join(",", fields));
|
||||||
|
}
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
|
||||||
|
Assert.assertThat(expectedParams, equalTo(request.getParameters()));
|
||||||
|
Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDeleteIndex() {
|
||||||
|
String[] indices = RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomTimeout(deleteIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(deleteIndexRequest, expectedParams);
|
||||||
|
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(deleteIndexRequest::indicesOptions, deleteIndexRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.deleteIndex(deleteIndexRequest);
|
||||||
|
Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint());
|
||||||
|
Assert.assertEquals(expectedParams, request.getParameters());
|
||||||
|
Assert.assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
|
||||||
|
Assert.assertNull(request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetSettings() throws IOException {
|
||||||
|
String[] indicesUnderTest = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
|
||||||
|
GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(getSettingsRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
|
||||||
|
RequestConvertersTests.setRandomLocal(getSettingsRequest, expectedParams);
|
||||||
|
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
// the request object will not have include_defaults present unless it is set to
|
||||||
|
// true
|
||||||
|
getSettingsRequest.includeDefaults(ESTestCase.randomBoolean());
|
||||||
|
if (getSettingsRequest.includeDefaults()) {
|
||||||
|
expectedParams.put("include_defaults", Boolean.toString(true));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indicesUnderTest != null && indicesUnderTest.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indicesUnderTest));
|
||||||
|
}
|
||||||
|
endpoint.add("_settings");
|
||||||
|
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
String[] names = ESTestCase.randomBoolean() ? null : new String[ESTestCase.randomIntBetween(0, 3)];
|
||||||
|
if (names != null) {
|
||||||
|
for (int x = 0; x < names.length; x++) {
|
||||||
|
names[x] = ESTestCase.randomAlphaOfLengthBetween(3, 10);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
getSettingsRequest.names(names);
|
||||||
|
if (names != null && names.length > 0) {
|
||||||
|
endpoint.add(String.join(",", names));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.getSettings(getSettingsRequest);
|
||||||
|
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetIndex() throws IOException {
|
||||||
|
String[] indicesUnderTest = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
|
||||||
|
GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomLocal(getIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomHumanReadable(getIndexRequest, expectedParams);
|
||||||
|
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
// the request object will not have include_defaults present unless it is set to
|
||||||
|
// true
|
||||||
|
getIndexRequest.includeDefaults(ESTestCase.randomBoolean());
|
||||||
|
if (getIndexRequest.includeDefaults()) {
|
||||||
|
expectedParams.put("include_defaults", Boolean.toString(true));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indicesUnderTest != null && indicesUnderTest.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indicesUnderTest));
|
||||||
|
}
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.getIndex(getIndexRequest);
|
||||||
|
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDeleteIndexEmptyIndices() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
|
||||||
|
ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
|
||||||
|
Assert.assertNotNull(validationException);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testOpenIndex() {
|
||||||
|
String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
|
||||||
|
OpenIndexRequest openIndexRequest = new OpenIndexRequest(indices);
|
||||||
|
openIndexRequest.indices(indices);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomTimeout(openIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(openIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(openIndexRequest::indicesOptions, openIndexRequest::indicesOptions, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomWaitForActiveShards(openIndexRequest::waitForActiveShards, expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.openIndex(openIndexRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_open");
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
Assert.assertThat(expectedParams, equalTo(request.getParameters()));
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testOpenIndexEmptyIndices() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
|
||||||
|
ActionRequestValidationException validationException = new OpenIndexRequest(indices).validate();
|
||||||
|
Assert.assertNotNull(validationException);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testCloseIndex() {
|
||||||
|
String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
|
||||||
|
CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indices);
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomTimeout(closeIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomMasterTimeout(closeIndexRequest, expectedParams);
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(closeIndexRequest::indicesOptions, closeIndexRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.closeIndex(closeIndexRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_close");
|
||||||
|
Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
||||||
|
Assert.assertThat(expectedParams, equalTo(request.getParameters()));
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testCloseIndexEmptyIndices() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
|
||||||
|
ActionRequestValidationException validationException = new CloseIndexRequest(indices).validate();
|
||||||
|
Assert.assertNotNull(validationException);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testRefresh() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
RefreshRequest refreshRequest;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
refreshRequest = new RefreshRequest(indices);
|
||||||
|
} else {
|
||||||
|
refreshRequest = new RefreshRequest();
|
||||||
|
refreshRequest.indices(indices);
|
||||||
|
}
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(refreshRequest::indicesOptions, refreshRequest::indicesOptions, expectedParams);
|
||||||
|
Request request = IndicesRequestConverters.refresh(refreshRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indices != null && indices.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indices));
|
||||||
|
}
|
||||||
|
endpoint.add("_refresh");
|
||||||
|
Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testFlush() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
FlushRequest flushRequest;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
flushRequest = new FlushRequest(indices);
|
||||||
|
} else {
|
||||||
|
flushRequest = new FlushRequest();
|
||||||
|
flushRequest.indices(indices);
|
||||||
|
}
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(flushRequest::indicesOptions, flushRequest::indicesOptions, expectedParams);
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
flushRequest.force(ESTestCase.randomBoolean());
|
||||||
|
}
|
||||||
|
expectedParams.put("force", Boolean.toString(flushRequest.force()));
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
flushRequest.waitIfOngoing(ESTestCase.randomBoolean());
|
||||||
|
}
|
||||||
|
expectedParams.put("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.flush(flushRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indices != null && indices.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indices));
|
||||||
|
}
|
||||||
|
endpoint.add("_flush");
|
||||||
|
Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSyncedFlush() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
SyncedFlushRequest syncedFlushRequest;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
syncedFlushRequest = new SyncedFlushRequest(indices);
|
||||||
|
} else {
|
||||||
|
syncedFlushRequest = new SyncedFlushRequest();
|
||||||
|
syncedFlushRequest.indices(indices);
|
||||||
|
}
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(syncedFlushRequest::indicesOptions, syncedFlushRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
Request request = IndicesRequestConverters.flushSynced(syncedFlushRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indices != null && indices.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indices));
|
||||||
|
}
|
||||||
|
endpoint.add("_flush/synced");
|
||||||
|
Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testForceMerge() {
|
||||||
|
String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
|
||||||
|
ForceMergeRequest forceMergeRequest;
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
forceMergeRequest = new ForceMergeRequest(indices);
|
||||||
|
} else {
|
||||||
|
forceMergeRequest = new ForceMergeRequest();
|
||||||
|
forceMergeRequest.indices(indices);
|
||||||
|
}
|
||||||
|
|
||||||
|
Map<String, String> expectedParams = new HashMap<>();
|
||||||
|
RequestConvertersTests.setRandomIndicesOptions(forceMergeRequest::indicesOptions, forceMergeRequest::indicesOptions,
|
||||||
|
expectedParams);
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
forceMergeRequest.maxNumSegments(ESTestCase.randomInt());
|
||||||
|
}
|
||||||
|
expectedParams.put("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
forceMergeRequest.onlyExpungeDeletes(ESTestCase.randomBoolean());
|
||||||
|
}
|
||||||
|
expectedParams.put("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
|
||||||
|
if (ESTestCase.randomBoolean()) {
|
||||||
|
forceMergeRequest.flush(ESTestCase.randomBoolean());
|
||||||
|
}
|
||||||
|
expectedParams.put("flush", Boolean.toString(forceMergeRequest.flush()));
|
||||||
|
|
||||||
|
Request request = IndicesRequestConverters.forceMerge(forceMergeRequest);
|
||||||
|
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
||||||
|
if (indices != null && indices.length > 0) {
|
||||||
|
endpoint.add(String.join(",", indices));
|
||||||
|
}
|
||||||
|
endpoint.add("_forcemerge");
|
||||||
|
Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
|
||||||
|
Assert.assertThat(request.getParameters(), equalTo(expectedParams));
|
||||||
|
Assert.assertThat(request.getEntity(), nullValue());
|
||||||
|
Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
|
||||||
|
}
    public void testClearCache() {
        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
        ClearIndicesCacheRequest clearIndicesCacheRequest;
        if (ESTestCase.randomBoolean()) {
            clearIndicesCacheRequest = new ClearIndicesCacheRequest(indices);
        } else {
            clearIndicesCacheRequest = new ClearIndicesCacheRequest();
            clearIndicesCacheRequest.indices(indices);
        }
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomIndicesOptions(clearIndicesCacheRequest::indicesOptions, clearIndicesCacheRequest::indicesOptions,
                expectedParams);
        if (ESTestCase.randomBoolean()) {
            clearIndicesCacheRequest.queryCache(ESTestCase.randomBoolean());
        }
        expectedParams.put("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
        if (ESTestCase.randomBoolean()) {
            clearIndicesCacheRequest.fieldDataCache(ESTestCase.randomBoolean());
        }
        expectedParams.put("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
        if (ESTestCase.randomBoolean()) {
            clearIndicesCacheRequest.requestCache(ESTestCase.randomBoolean());
        }
        expectedParams.put("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
        if (ESTestCase.randomBoolean()) {
            clearIndicesCacheRequest.fields(RequestConvertersTests.randomIndicesNames(1, 5));
            expectedParams.put("fields", String.join(",", clearIndicesCacheRequest.fields()));
        }

        Request request = IndicesRequestConverters.clearCache(clearIndicesCacheRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        if (indices != null && indices.length > 0) {
            endpoint.add(String.join(",", indices));
        }
        endpoint.add("_cache/clear");
        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
        Assert.assertThat(request.getEntity(), nullValue());
        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
    }

    public void testExistsAlias() {
        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
        getAliasesRequest.indices(indices);
        // the HEAD endpoint requires at least an alias or an index
        boolean hasIndices = indices != null && indices.length > 0;
        String[] aliases;
        if (hasIndices) {
            aliases = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
        } else {
            aliases = RequestConvertersTests.randomIndicesNames(1, 5);
        }
        getAliasesRequest.aliases(aliases);
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomLocal(getAliasesRequest, expectedParams);
        RequestConvertersTests.setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions,
                expectedParams);

        Request request = IndicesRequestConverters.existsAlias(getAliasesRequest);
        StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");
        if (indices != null && indices.length > 0) {
            expectedEndpoint.add(String.join(",", indices));
        }
        expectedEndpoint.add("_alias");
        if (aliases != null && aliases.length > 0) {
            expectedEndpoint.add(String.join(",", aliases));
        }
        Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod());
        Assert.assertEquals(expectedEndpoint.toString(), request.getEndpoint());
        Assert.assertEquals(expectedParams, request.getParameters());
        Assert.assertNull(request.getEntity());
    }

    public void testExistsAliasNoAliasNoIndex() {
        {
            GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
            IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
                    () -> IndicesRequestConverters.existsAlias(getAliasesRequest));
            Assert.assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
        }
        {
            GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null);
            getAliasesRequest.indices((String[]) null);
            IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
                    () -> IndicesRequestConverters.existsAlias(getAliasesRequest));
            Assert.assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
        }
    }
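
    // Split and shrink share one conversion path; resizeTest below drives both
    // converters and differs only in the ResizeType and converter function.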
    public void testSplit() throws IOException {
        resizeTest(ResizeType.SPLIT, IndicesRequestConverters::split);
    }

    public void testSplitWrongResizeType() {
        ResizeRequest resizeRequest = new ResizeRequest("target", "source");
        resizeRequest.setResizeType(ResizeType.SHRINK);
        IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
                () -> IndicesRequestConverters.split(resizeRequest));
        Assert.assertEquals("Wrong resize type [SHRINK] for indices split request", iae.getMessage());
    }

    public void testShrinkWrongResizeType() {
        ResizeRequest resizeRequest = new ResizeRequest("target", "source");
        resizeRequest.setResizeType(ResizeType.SPLIT);
        IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
                () -> IndicesRequestConverters.shrink(resizeRequest));
        Assert.assertEquals("Wrong resize type [SPLIT] for indices shrink request", iae.getMessage());
    }

    public void testShrink() throws IOException {
        resizeTest(ResizeType.SHRINK, IndicesRequestConverters::shrink);
    }

    private void resizeTest(ResizeType resizeType, CheckedFunction<ResizeRequest, Request, IOException> function)
            throws IOException {
        String[] indices = RequestConvertersTests.randomIndicesNames(2, 2);
        ResizeRequest resizeRequest = new ResizeRequest(indices[0], indices[1]);
        resizeRequest.setResizeType(resizeType);
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomMasterTimeout(resizeRequest, expectedParams);
        RequestConvertersTests.setRandomTimeout(resizeRequest::timeout, resizeRequest.timeout(), expectedParams);

        if (ESTestCase.randomBoolean()) {
            CreateIndexRequest createIndexRequest = new CreateIndexRequest(ESTestCase.randomAlphaOfLengthBetween(3, 10));
            if (ESTestCase.randomBoolean()) {
                createIndexRequest.settings(randomIndexSettings());
            }
            if (ESTestCase.randomBoolean()) {
                randomAliases(createIndexRequest);
            }
            resizeRequest.setTargetIndex(createIndexRequest);
        }
        RequestConvertersTests.setRandomWaitForActiveShards(resizeRequest::setWaitForActiveShards, expectedParams);

        Request request = function.apply(resizeRequest);
        Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        String expectedEndpoint = "/" + resizeRequest.getSourceIndex() + "/_" + resizeType.name().toLowerCase(Locale.ROOT) + "/"
                + resizeRequest.getTargetIndexRequest().index();
        Assert.assertEquals(expectedEndpoint, request.getEndpoint());
        Assert.assertEquals(expectedParams, request.getParameters());
        RequestConvertersTests.assertToXContentBody(resizeRequest, request.getEntity());
    }
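
    // The rollover endpoint is /{alias}/_rollover, with the new index name
    // appended only when one was supplied on the request.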
    public void testRollover() throws IOException {
        RolloverRequest rolloverRequest = new RolloverRequest(ESTestCase.randomAlphaOfLengthBetween(3, 10),
                ESTestCase.randomBoolean() ? null : ESTestCase.randomAlphaOfLengthBetween(3, 10));
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams);
        RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams);
        if (ESTestCase.randomBoolean()) {
            rolloverRequest.dryRun(ESTestCase.randomBoolean());
            if (rolloverRequest.isDryRun()) {
                expectedParams.put("dry_run", "true");
            }
        }
        if (ESTestCase.randomBoolean()) {
            rolloverRequest.addMaxIndexAgeCondition(new TimeValue(ESTestCase.randomNonNegativeLong()));
        }
        if (ESTestCase.randomBoolean()) {
            String type = ESTestCase.randomAlphaOfLengthBetween(3, 10);
            rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type));
        }
        if (ESTestCase.randomBoolean()) {
            RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest());
        }
        if (ESTestCase.randomBoolean()) {
            rolloverRequest.getCreateIndexRequest().settings(RandomCreateIndexGenerator.randomIndexSettings());
        }
        RequestConvertersTests.setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams);

        Request request = IndicesRequestConverters.rollover(rolloverRequest);
        if (rolloverRequest.getNewIndexName() == null) {
            Assert.assertEquals("/" + rolloverRequest.getAlias() + "/_rollover", request.getEndpoint());
        } else {
            Assert.assertEquals("/" + rolloverRequest.getAlias() + "/_rollover/" + rolloverRequest.getNewIndexName(),
                    request.getEndpoint());
        }
        Assert.assertEquals(HttpPost.METHOD_NAME, request.getMethod());
        RequestConvertersTests.assertToXContentBody(rolloverRequest, request.getEntity());
        Assert.assertEquals(expectedParams, request.getParameters());
    }

    public void testGetAlias() {
        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();

        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomLocal(getAliasesRequest, expectedParams);
        RequestConvertersTests.setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions,
                expectedParams);

        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
        String[] aliases = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
        getAliasesRequest.indices(indices);
        getAliasesRequest.aliases(aliases);

        Request request = IndicesRequestConverters.getAlias(getAliasesRequest);
        StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");

        if (false == CollectionUtils.isEmpty(indices)) {
            expectedEndpoint.add(String.join(",", indices));
        }
        expectedEndpoint.add("_alias");

        if (false == CollectionUtils.isEmpty(aliases)) {
            expectedEndpoint.add(String.join(",", aliases));
        }

        Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        Assert.assertEquals(expectedEndpoint.toString(), request.getEndpoint());
        Assert.assertEquals(expectedParams, request.getParameters());
        Assert.assertNull(request.getEntity());
    }

    public void testIndexPutSettings() throws IOException {
        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
        UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices);
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomMasterTimeout(updateSettingsRequest, expectedParams);
        RequestConvertersTests.setRandomTimeout(updateSettingsRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
        RequestConvertersTests.setRandomIndicesOptions(updateSettingsRequest::indicesOptions, updateSettingsRequest::indicesOptions,
                expectedParams);
        if (ESTestCase.randomBoolean()) {
            updateSettingsRequest.setPreserveExisting(ESTestCase.randomBoolean());
            if (updateSettingsRequest.isPreserveExisting()) {
                expectedParams.put("preserve_existing", "true");
            }
        }

        Request request = IndicesRequestConverters.indexPutSettings(updateSettingsRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        if (indices != null && indices.length > 0) {
            endpoint.add(String.join(",", indices));
        }
        endpoint.add("_settings");
        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
        Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        RequestConvertersTests.assertToXContentBody(updateSettingsRequest, request.getEntity());
        Assert.assertEquals(expectedParams, request.getParameters());
    }
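
    // The keys of the names map are raw template names; the values are the
    // URL-encoded forms expected to appear in the request endpoint.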
    public void testPutTemplateRequest() throws Exception {
        Map<String, String> names = new HashMap<>();
        names.put("log", "log");
        names.put("template#1", "template%231");
        names.put("-#template", "-%23template");
        names.put("foo^bar", "foo%5Ebar");

        PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(ESTestCase.randomFrom(names.keySet()))
                .patterns(Arrays.asList(ESTestCase.generateRandomStringArray(20, 100, false, false)));
        if (ESTestCase.randomBoolean()) {
            putTemplateRequest.order(ESTestCase.randomInt());
        }
        if (ESTestCase.randomBoolean()) {
            putTemplateRequest.version(ESTestCase.randomInt());
        }
        if (ESTestCase.randomBoolean()) {
            putTemplateRequest.settings(Settings.builder().put("setting-" + ESTestCase.randomInt(), ESTestCase.randomTimeValue()));
        }
        if (ESTestCase.randomBoolean()) {
            putTemplateRequest.mapping("doc-" + ESTestCase.randomInt(),
                    "field-" + ESTestCase.randomInt(), "type=" + ESTestCase.randomFrom("text", "keyword"));
        }
        if (ESTestCase.randomBoolean()) {
            putTemplateRequest.alias(new Alias("alias-" + ESTestCase.randomInt()));
        }
        Map<String, String> expectedParams = new HashMap<>();
        if (ESTestCase.randomBoolean()) {
            expectedParams.put("create", Boolean.TRUE.toString());
            putTemplateRequest.create(true);
        }
        if (ESTestCase.randomBoolean()) {
            String cause = ESTestCase.randomUnicodeOfCodepointLengthBetween(1, 50);
            putTemplateRequest.cause(cause);
            expectedParams.put("cause", cause);
        }
        RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams);
        Request request = IndicesRequestConverters.putTemplate(putTemplateRequest);
        Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name())));
        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
        RequestConvertersTests.assertToXContentBody(putTemplateRequest, request.getEntity());
    }

    public void testValidateQuery() throws Exception {
        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
        String[] types = ESTestCase.randomBoolean() ? ESTestCase.generateRandomStringArray(5, 5, false, false) : null;
        ValidateQueryRequest validateQueryRequest;
        if (ESTestCase.randomBoolean()) {
            validateQueryRequest = new ValidateQueryRequest(indices);
        } else {
            validateQueryRequest = new ValidateQueryRequest();
            validateQueryRequest.indices(indices);
        }
        validateQueryRequest.types(types);
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions,
                expectedParams);
        validateQueryRequest.explain(ESTestCase.randomBoolean());
        validateQueryRequest.rewrite(ESTestCase.randomBoolean());
        validateQueryRequest.allShards(ESTestCase.randomBoolean());
        expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain()));
        expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
        expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards()));
        Request request = IndicesRequestConverters.validateQuery(validateQueryRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        if (indices != null && indices.length > 0) {
            endpoint.add(String.join(",", indices));
            if (types != null && types.length > 0) {
                endpoint.add(String.join(",", types));
            }
        }
        endpoint.add("_validate/query");
        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
        RequestConvertersTests.assertToXContentBody(validateQueryRequest, request.getEntity());
        Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
    }
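
    // As in testPutTemplateRequest, the values of the encodes map are the
    // URL-encoded forms of the template names used as keys.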
    public void testGetTemplateRequest() throws Exception {
        Map<String, String> encodes = new HashMap<>();
        encodes.put("log", "log");
        encodes.put("1", "1");
        encodes.put("template#1", "template%231");
        encodes.put("template-*", "template-*");
        encodes.put("foo^bar", "foo%5Ebar");
        List<String> names = ESTestCase.randomSubsetOf(1, encodes.keySet());
        GetIndexTemplatesRequest getTemplatesRequest = new GetIndexTemplatesRequest().names(names.toArray(new String[0]));
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest, expectedParams);
        RequestConvertersTests.setRandomLocal(getTemplatesRequest, expectedParams);
        Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest);
        Assert.assertThat(request.getEndpoint(),
                equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(","))));
        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
        Assert.assertThat(request.getEntity(), nullValue());
    }
}

@@ -30,6 +30,7 @@ import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.GetBucketsRequest;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetInfluencersRequest;
@@ -259,7 +260,7 @@ public class MLRequestConvertersTests extends ESTestCase {
         assertEquals(Boolean.toString(true), request.getParameters().get("force"));
     }

-    public void testDeleteForecast() throws Exception {
+    public void testDeleteForecast() {
         String jobId = randomAlphaOfLength(10);
         DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId);

@@ -415,6 +416,28 @@ public class MLRequestConvertersTests extends ESTestCase {
         }
     }

+    public void testGetCalendars() throws IOException {
+        GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest();
+        String expectedEndpoint = "/_xpack/ml/calendars";
+
+        if (randomBoolean()) {
+            String calendarId = randomAlphaOfLength(10);
+            getCalendarsRequest.setCalendarId(calendarId);
+            expectedEndpoint += "/" + calendarId;
+        }
+        if (randomBoolean()) {
+            getCalendarsRequest.setPageParams(new PageParams(10, 20));
+        }
+
+        Request request = MLRequestConverters.getCalendars(getCalendarsRequest);
+        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        assertEquals(expectedEndpoint, request.getEndpoint());
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
+            GetCalendarsRequest parsedRequest = GetCalendarsRequest.PARSER.apply(parser, null);
+            assertThat(parsedRequest, equalTo(getCalendarsRequest));
+        }
+    }
+
     private static Job createValidJob(String jobId) {
         AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
                 Detector.builder().setFunction("count").build()));
@@ -32,6 +32,8 @@ import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.FlushJobResponse;
 import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.ForecastJobResponse;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
+import org.elasticsearch.client.ml.GetCalendarsResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetDatafeedResponse;
 import org.elasticsearch.client.ml.GetJobRequest;
@@ -483,7 +485,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
     }

     public void testPutCalendar() throws IOException {
-
         Calendar calendar = CalendarTests.testInstance();
         MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
         PutCalendarResponse putCalendarResponse = execute(new PutCalendarRequest(calendar), machineLearningClient::putCalendar,
@@ -492,6 +493,30 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         assertThat(putCalendarResponse.getCalendar(), equalTo(calendar));
     }

+    public void testGetCalendars() throws Exception {
+        Calendar calendar1 = CalendarTests.testInstance();
+        Calendar calendar2 = CalendarTests.testInstance();
+
+        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+        machineLearningClient.putCalendar(new PutCalendarRequest(calendar1), RequestOptions.DEFAULT);
+        machineLearningClient.putCalendar(new PutCalendarRequest(calendar2), RequestOptions.DEFAULT);
+
+        GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest();
+        getCalendarsRequest.setCalendarId("_all");
+        GetCalendarsResponse getCalendarsResponse = execute(getCalendarsRequest, machineLearningClient::getCalendars,
+                machineLearningClient::getCalendarsAsync);
+        assertEquals(2, getCalendarsResponse.count());
+        assertEquals(2, getCalendarsResponse.calendars().size());
+        assertThat(getCalendarsResponse.calendars().stream().map(Calendar::getId).collect(Collectors.toList()),
+                hasItems(calendar1.getId(), calendar2.getId()));
+
+        getCalendarsRequest.setCalendarId(calendar1.getId());
+        getCalendarsResponse = execute(getCalendarsRequest, machineLearningClient::getCalendars,
+                machineLearningClient::getCalendarsAsync);
+        assertEquals(1, getCalendarsResponse.count());
+        assertEquals(calendar1, getCalendarsResponse.calendars().get(0));
+    }
+
     public static String randomValidJobId() {
         CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
         return generator.ofCodePointsLength(random(), 10, 10);
@@ -27,37 +27,12 @@ import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
 import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.util.EntityUtils;
-import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
-import org.elasticsearch.action.admin.indices.alias.Alias;
-import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
-import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
-import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
-import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
-import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
-import org.elasticsearch.action.admin.indices.flush.FlushRequest;
-import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
-import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
-import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
-import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
-import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
-import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
-import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
-import org.elasticsearch.action.admin.indices.shrink.ResizeType;
-import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
-import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
-import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkShardRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
@@ -86,22 +61,18 @@ import org.elasticsearch.client.indexlifecycle.LifecyclePolicy;
 import org.elasticsearch.client.indexlifecycle.PutLifecyclePolicyRequest;
 import org.elasticsearch.client.indexlifecycle.DeleteLifecyclePolicyRequest;
 import org.elasticsearch.common.CheckedBiConsumer;
-import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.lucene.uid.Versions;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.RandomCreateIndexGenerator;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermQueryBuilder;
@@ -150,17 +121,12 @@ import java.util.StringJoiner;
 import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.function.Supplier;
-import java.util.stream.Collectors;

 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
 import static org.elasticsearch.client.RequestConverters.enforceSameContentType;
 import static org.elasticsearch.client.indexlifecycle.LifecyclePolicyTests.createRandomPolicy;
-import static org.elasticsearch.index.RandomCreateIndexGenerator.randomAliases;
-import static org.elasticsearch.index.RandomCreateIndexGenerator.randomCreateIndexRequest;
-import static org.elasticsearch.index.RandomCreateIndexGenerator.randomIndexSettings;
-import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAliasAction;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
@@ -271,30 +237,6 @@ public class RequestConvertersTests extends ESTestCase {
         getAndExistsTest(RequestConverters::exists, HttpHead.METHOD_NAME);
     }

-    public void testIndicesExist() {
-        String[] indices = randomIndicesNames(1, 10);
-
-        GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indices);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
-        setRandomLocal(getIndexRequest, expectedParams);
-        setRandomHumanReadable(getIndexRequest, expectedParams);
-        setRandomIncludeDefaults(getIndexRequest, expectedParams);
-
-        final Request request = RequestConverters.indicesExist(getIndexRequest);
-
-        assertEquals(HttpHead.METHOD_NAME, request.getMethod());
-        assertEquals("/" + String.join(",", indices), request.getEndpoint());
-        assertThat(expectedParams, equalTo(request.getParameters()));
-        assertNull(request.getEntity());
-    }
-
-    public void testIndicesExistEmptyIndices() {
-        expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest()));
-        expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null)));
-    }
-
     private static void getAndExistsTest(Function<GetRequest, Request> requestConverter, String method) {
         String index = randomAlphaOfLengthBetween(3, 10);
         String type = randomAlphaOfLengthBetween(3, 10);
@@ -353,41 +295,6 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(method, request.getMethod());
     }

-    public void testCreateIndex() throws IOException {
-        CreateIndexRequest createIndexRequest = randomCreateIndexRequest();
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(createIndexRequest, expectedParams);
-        setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams);
-
-        Request request = RequestConverters.createIndex(createIndexRequest);
-        assertEquals("/" + createIndexRequest.index(), request.getEndpoint());
-        assertEquals(expectedParams, request.getParameters());
-        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
-        assertToXContentBody(createIndexRequest, request.getEntity());
-    }
-
-    public void testCreateIndexNullIndex() {
-        ActionRequestValidationException validationException = new CreateIndexRequest(null).validate();
-        assertNotNull(validationException);
-    }
-
-    public void testUpdateAliases() throws IOException {
-        IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
-        AliasActions aliasAction = randomAliasAction();
-        indicesAliasesRequest.addAliasAction(aliasAction);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomTimeout(indicesAliasesRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(indicesAliasesRequest, expectedParams);
-
-        Request request = RequestConverters.updateAliases(indicesAliasesRequest);
-        assertEquals("/_aliases", request.getEndpoint());
-        assertEquals(expectedParams, request.getParameters());
-        assertToXContentBody(indicesAliasesRequest, request.getEntity());
-    }
-
     public void testReindex() throws IOException {
         ReindexRequest reindexRequest = new ReindexRequest();
         reindexRequest.setSourceIndices("source_idx");
@@ -547,282 +454,6 @@ public class RequestConvertersTests extends ESTestCase {
         assertToXContentBody(deleteByQueryRequest, request.getEntity());
     }

-    public void testPutMapping() throws IOException {
-        PutMappingRequest putMappingRequest = new PutMappingRequest();
-
-        String[] indices = randomIndicesNames(0, 5);
-        putMappingRequest.indices(indices);
-
-        String type = randomAlphaOfLengthBetween(3, 10);
-        putMappingRequest.type(type);
-
-        Map<String, String> expectedParams = new HashMap<>();
-
-        setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(putMappingRequest, expectedParams);
-
-        Request request = RequestConverters.putMapping(putMappingRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        String index = String.join(",", indices);
-        if (Strings.hasLength(index)) {
-            endpoint.add(index);
-        }
-        endpoint.add("_mapping");
-        endpoint.add(type);
-        assertEquals(endpoint.toString(), request.getEndpoint());
-
-        assertEquals(expectedParams, request.getParameters());
-        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
-        assertToXContentBody(putMappingRequest, request.getEntity());
-    }
-
-    public void testGetMapping() throws IOException {
-        GetMappingsRequest getMappingRequest = new GetMappingsRequest();
-
-        String[] indices = Strings.EMPTY_ARRAY;
-        if (randomBoolean()) {
-            indices = randomIndicesNames(0, 5);
-            getMappingRequest.indices(indices);
-        } else if (randomBoolean()) {
-            getMappingRequest.indices((String[]) null);
-        }
-
-        String type = null;
-        if (randomBoolean()) {
-            type = randomAlphaOfLengthBetween(3, 10);
-            getMappingRequest.types(type);
-        } else if (randomBoolean()) {
-            getMappingRequest.types((String[]) null);
-        }
-
-        Map<String, String> expectedParams = new HashMap<>();
-
-        setRandomIndicesOptions(getMappingRequest::indicesOptions, getMappingRequest::indicesOptions, expectedParams);
-        setRandomMasterTimeout(getMappingRequest, expectedParams);
-        setRandomLocal(getMappingRequest, expectedParams);
-
-        Request request = RequestConverters.getMappings(getMappingRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        String index = String.join(",", indices);
-        if (Strings.hasLength(index)) {
-            endpoint.add(index);
-        }
-        endpoint.add("_mapping");
-        if (type != null) {
-            endpoint.add(type);
-        }
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-
-        assertThat(expectedParams, equalTo(request.getParameters()));
-        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
-    }
-
-    public void testGetFieldMapping() throws IOException {
-        GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest();
-
-        String[] indices = Strings.EMPTY_ARRAY;
-        if (randomBoolean()) {
-            indices = randomIndicesNames(0, 5);
-            getFieldMappingsRequest.indices(indices);
-        } else if (randomBoolean()) {
-            getFieldMappingsRequest.indices((String[]) null);
-        }
-
-        String type = null;
-        if (randomBoolean()) {
-            type = randomAlphaOfLengthBetween(3, 10);
-            getFieldMappingsRequest.types(type);
-        } else if (randomBoolean()) {
-            getFieldMappingsRequest.types((String[]) null);
-        }
-
-        String[] fields = null;
-        if (randomBoolean()) {
-            fields = new String[randomIntBetween(1, 5)];
-            for (int i = 0; i < fields.length; i++) {
-                fields[i] = randomAlphaOfLengthBetween(3, 10);
-            }
-            getFieldMappingsRequest.fields(fields);
-        } else if (randomBoolean()) {
-            getFieldMappingsRequest.fields((String[]) null);
-        }
-
-        Map<String, String> expectedParams = new HashMap<>();
-
-        setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions, expectedParams);
-        setRandomLocal(getFieldMappingsRequest::local, expectedParams);
-
-        Request request = RequestConverters.getFieldMapping(getFieldMappingsRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        String index = String.join(",", indices);
-        if (Strings.hasLength(index)) {
-            endpoint.add(index);
-        }
-        endpoint.add("_mapping");
-        if (type != null) {
-            endpoint.add(type);
-        }
-        endpoint.add("field");
-        if (fields != null) {
-            endpoint.add(String.join(",", fields));
-        }
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-
-        assertThat(expectedParams, equalTo(request.getParameters()));
-        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
-    }
-
-    public void testDeleteIndex() {
-        String[] indices = randomIndicesNames(0, 5);
-        DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomTimeout(deleteIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(deleteIndexRequest, expectedParams);
-
-        setRandomIndicesOptions(deleteIndexRequest::indicesOptions, deleteIndexRequest::indicesOptions, expectedParams);
-
-        Request request = RequestConverters.deleteIndex(deleteIndexRequest);
-        assertEquals("/" + String.join(",", indices), request.getEndpoint());
-        assertEquals(expectedParams, request.getParameters());
-        assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
-        assertNull(request.getEntity());
-    }
-
-    public void testGetSettings() throws IOException {
-        String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5);
-
-        GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomMasterTimeout(getSettingsRequest, expectedParams);
-        setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, expectedParams);
-
-        setRandomLocal(getSettingsRequest, expectedParams);
-
-        if (randomBoolean()) {
-            // the request object will not have include_defaults present unless it is set to
-            // true
-            getSettingsRequest.includeDefaults(randomBoolean());
-            if (getSettingsRequest.includeDefaults()) {
-                expectedParams.put("include_defaults", Boolean.toString(true));
-            }
-        }
-
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indicesUnderTest != null && indicesUnderTest.length > 0) {
-            endpoint.add(String.join(",", indicesUnderTest));
-        }
-        endpoint.add("_settings");
-
-        if (randomBoolean()) {
-            String[] names = randomBoolean() ? null : new String[randomIntBetween(0, 3)];
-            if (names != null) {
-                for (int x = 0; x < names.length; x++) {
-                    names[x] = randomAlphaOfLengthBetween(3, 10);
-                }
-            }
-            getSettingsRequest.names(names);
-            if (names != null && names.length > 0) {
-                endpoint.add(String.join(",", names));
-            }
-        }
-
-        Request request = RequestConverters.getSettings(getSettingsRequest);
-
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
-        assertThat(request.getEntity(), nullValue());
-    }
-
-    public void testGetIndex() throws IOException {
-        String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5);
-
-        GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomMasterTimeout(getIndexRequest, expectedParams);
-        setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
-        setRandomLocal(getIndexRequest, expectedParams);
-        setRandomHumanReadable(getIndexRequest, expectedParams);
-
-        if (randomBoolean()) {
-            // the request object will not have include_defaults present unless it is set to
-            // true
-            getIndexRequest.includeDefaults(randomBoolean());
-            if (getIndexRequest.includeDefaults()) {
-                expectedParams.put("include_defaults", Boolean.toString(true));
-            }
-        }
-
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indicesUnderTest != null && indicesUnderTest.length > 0) {
-            endpoint.add(String.join(",", indicesUnderTest));
-        }
-
-        Request request = RequestConverters.getIndex(getIndexRequest);
-
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
-        assertThat(request.getEntity(), nullValue());
-    }
-
-    public void testDeleteIndexEmptyIndices() {
-        String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
-        ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
-        assertNotNull(validationException);
-    }
-
-    public void testOpenIndex() {
-        String[] indices = randomIndicesNames(1, 5);
-        OpenIndexRequest openIndexRequest = new OpenIndexRequest(indices);
-        openIndexRequest.indices(indices);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomTimeout(openIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(openIndexRequest, expectedParams);
-        setRandomIndicesOptions(openIndexRequest::indicesOptions, openIndexRequest::indicesOptions, expectedParams);
-        setRandomWaitForActiveShards(openIndexRequest::waitForActiveShards, expectedParams);
-
-        Request request = RequestConverters.openIndex(openIndexRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_open");
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-        assertThat(expectedParams, equalTo(request.getParameters()));
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-        assertThat(request.getEntity(), nullValue());
-    }
-
-    public void testOpenIndexEmptyIndices() {
-        String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
-        ActionRequestValidationException validationException = new OpenIndexRequest(indices).validate();
-        assertNotNull(validationException);
-    }
-
-    public void testCloseIndex() {
-        String[] indices = randomIndicesNames(1, 5);
-        CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indices);
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomTimeout(closeIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
-        setRandomMasterTimeout(closeIndexRequest, expectedParams);
-        setRandomIndicesOptions(closeIndexRequest::indicesOptions, closeIndexRequest::indicesOptions, expectedParams);
-
-        Request request = RequestConverters.closeIndex(closeIndexRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_close");
-        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
-        assertThat(expectedParams, equalTo(request.getParameters()));
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-        assertThat(request.getEntity(), nullValue());
-    }
-
-    public void testCloseIndexEmptyIndices() {
-        String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY;
-        ActionRequestValidationException validationException = new CloseIndexRequest(indices).validate();
-        assertNotNull(validationException);
-    }
-
     public void testIndex() throws IOException {
         String index = randomAlphaOfLengthBetween(3, 10);
         String type = randomAlphaOfLengthBetween(3, 10);
@@ -896,161 +527,6 @@ public class RequestConvertersTests extends ESTestCase {
         }
     }

-    public void testRefresh() {
-        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
-        RefreshRequest refreshRequest;
-        if (randomBoolean()) {
-            refreshRequest = new RefreshRequest(indices);
-        } else {
-            refreshRequest = new RefreshRequest();
-            refreshRequest.indices(indices);
-        }
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(refreshRequest::indicesOptions, refreshRequest::indicesOptions, expectedParams);
-        Request request = RequestConverters.refresh(refreshRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
-            endpoint.add(String.join(",", indices));
-        }
-        endpoint.add("_refresh");
-        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getEntity(), nullValue());
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-    }
-
-    public void testFlush() {
-        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
-        FlushRequest flushRequest;
-        if (randomBoolean()) {
-            flushRequest = new FlushRequest(indices);
-        } else {
-            flushRequest = new FlushRequest();
-            flushRequest.indices(indices);
-        }
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(flushRequest::indicesOptions, flushRequest::indicesOptions, expectedParams);
-        if (randomBoolean()) {
-            flushRequest.force(randomBoolean());
-        }
-        expectedParams.put("force", Boolean.toString(flushRequest.force()));
-        if (randomBoolean()) {
-            flushRequest.waitIfOngoing(randomBoolean());
-        }
-        expectedParams.put("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
-
-        Request request = RequestConverters.flush(flushRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
-            endpoint.add(String.join(",", indices));
-        }
-        endpoint.add("_flush");
-        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getEntity(), nullValue());
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-    }
-
-    public void testSyncedFlush() {
-        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
-        SyncedFlushRequest syncedFlushRequest;
-        if (randomBoolean()) {
-            syncedFlushRequest = new SyncedFlushRequest(indices);
-        } else {
-            syncedFlushRequest = new SyncedFlushRequest();
-            syncedFlushRequest.indices(indices);
-        }
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(syncedFlushRequest::indicesOptions, syncedFlushRequest::indicesOptions, expectedParams);
-        Request request = RequestConverters.flushSynced(syncedFlushRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
-            endpoint.add(String.join(",", indices));
-        }
-        endpoint.add("_flush/synced");
-        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getEntity(), nullValue());
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-    }
-
-    public void testForceMerge() {
-        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
-        ForceMergeRequest forceMergeRequest;
-        if (randomBoolean()) {
-            forceMergeRequest = new ForceMergeRequest(indices);
-        } else {
-            forceMergeRequest = new ForceMergeRequest();
-            forceMergeRequest.indices(indices);
-        }
-
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(forceMergeRequest::indicesOptions, forceMergeRequest::indicesOptions, expectedParams);
-        if (randomBoolean()) {
-            forceMergeRequest.maxNumSegments(randomInt());
-        }
-        expectedParams.put("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
-        if (randomBoolean()) {
-            forceMergeRequest.onlyExpungeDeletes(randomBoolean());
-        }
-        expectedParams.put("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
-        if (randomBoolean()) {
-            forceMergeRequest.flush(randomBoolean());
-        }
-        expectedParams.put("flush", Boolean.toString(forceMergeRequest.flush()));
-
-        Request request = RequestConverters.forceMerge(forceMergeRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
-            endpoint.add(String.join(",", indices));
-        }
-        endpoint.add("_forcemerge");
-        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getEntity(), nullValue());
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-    }
-
-    public void testClearCache() {
-        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
-        ClearIndicesCacheRequest clearIndicesCacheRequest;
-        if (randomBoolean()) {
-            clearIndicesCacheRequest = new ClearIndicesCacheRequest(indices);
-        } else {
-            clearIndicesCacheRequest = new ClearIndicesCacheRequest();
-            clearIndicesCacheRequest.indices(indices);
-        }
-        Map<String, String> expectedParams = new HashMap<>();
-        setRandomIndicesOptions(clearIndicesCacheRequest::indicesOptions, clearIndicesCacheRequest::indicesOptions, expectedParams);
-        if (randomBoolean()) {
-            clearIndicesCacheRequest.queryCache(randomBoolean());
-        }
-        expectedParams.put("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
-        if (randomBoolean()) {
-            clearIndicesCacheRequest.fieldDataCache(randomBoolean());
-        }
-        expectedParams.put("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
-        if (randomBoolean()) {
-            clearIndicesCacheRequest.requestCache(randomBoolean());
-        }
-        expectedParams.put("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
-        if (randomBoolean()) {
-            clearIndicesCacheRequest.fields(randomIndicesNames(1, 5));
-            expectedParams.put("fields", String.join(",", clearIndicesCacheRequest.fields()));
-        }
-
-        Request request = RequestConverters.clearCache(clearIndicesCacheRequest);
-        StringJoiner endpoint = new StringJoiner("/", "/", "");
-        if (indices != null && indices.length > 0) {
-            endpoint.add(String.join(",", indices));
-        }
-        endpoint.add("_cache/clear");
-        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
-        assertThat(request.getParameters(), equalTo(expectedParams));
-        assertThat(request.getEntity(), nullValue());
-        assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
-    }
-
     public void testUpdate() throws IOException {
         XContentType xContentType = randomFrom(XContentType.values());
|
XContentType xContentType = randomFrom(XContentType.values());
|
||||||
|
|
||||||
@ -1608,54 +1084,6 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
|
assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testExistsAlias() {
|
|
||||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
|
|
||||||
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
|
|
||||||
getAliasesRequest.indices(indices);
|
|
||||||
// the HEAD endpoint requires at least an alias or an index
|
|
||||||
boolean hasIndices = indices != null && indices.length > 0;
|
|
||||||
String[] aliases;
|
|
||||||
if (hasIndices) {
|
|
||||||
aliases = randomBoolean() ? null : randomIndicesNames(0, 5);
|
|
||||||
} else {
|
|
||||||
aliases = randomIndicesNames(1, 5);
|
|
||||||
}
|
|
||||||
getAliasesRequest.aliases(aliases);
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomLocal(getAliasesRequest, expectedParams);
|
|
||||||
setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions, expectedParams);
|
|
||||||
|
|
||||||
Request request = RequestConverters.existsAlias(getAliasesRequest);
|
|
||||||
StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");
|
|
||||||
if (indices != null && indices.length > 0) {
|
|
||||||
expectedEndpoint.add(String.join(",", indices));
|
|
||||||
}
|
|
||||||
expectedEndpoint.add("_alias");
|
|
||||||
if (aliases != null && aliases.length > 0) {
|
|
||||||
expectedEndpoint.add(String.join(",", aliases));
|
|
||||||
}
|
|
||||||
assertEquals(HttpHead.METHOD_NAME, request.getMethod());
|
|
||||||
assertEquals(expectedEndpoint.toString(), request.getEndpoint());
|
|
||||||
assertEquals(expectedParams, request.getParameters());
|
|
||||||
assertNull(request.getEntity());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testExistsAliasNoAliasNoIndex() {
|
|
||||||
{
|
|
||||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
|
|
||||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
|
|
||||||
() -> RequestConverters.existsAlias(getAliasesRequest));
|
|
||||||
assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
|
|
||||||
}
|
|
||||||
{
|
|
||||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null);
|
|
||||||
getAliasesRequest.indices((String[]) null);
|
|
||||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
|
|
||||||
() -> RequestConverters.existsAlias(getAliasesRequest));
|
|
||||||
assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testExplain() throws IOException {
|
public void testExplain() throws IOException {
|
||||||
String index = randomAlphaOfLengthBetween(3, 10);
|
String index = randomAlphaOfLengthBetween(3, 10);
|
||||||
String type = randomAlphaOfLengthBetween(3, 10);
|
String type = randomAlphaOfLengthBetween(3, 10);
|
||||||
@ -1759,245 +1187,6 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
assertToXContentBody(spec, request.getEntity());
|
assertToXContentBody(spec, request.getEntity());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testSplit() throws IOException {
|
|
||||||
resizeTest(ResizeType.SPLIT, RequestConverters::split);
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testSplitWrongResizeType() {
|
|
||||||
ResizeRequest resizeRequest = new ResizeRequest("target", "source");
|
|
||||||
resizeRequest.setResizeType(ResizeType.SHRINK);
|
|
||||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> RequestConverters.split(resizeRequest));
|
|
||||||
assertEquals("Wrong resize type [SHRINK] for indices split request", iae.getMessage());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testShrinkWrongResizeType() {
|
|
||||||
ResizeRequest resizeRequest = new ResizeRequest("target", "source");
|
|
||||||
resizeRequest.setResizeType(ResizeType.SPLIT);
|
|
||||||
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> RequestConverters.shrink(resizeRequest));
|
|
||||||
assertEquals("Wrong resize type [SPLIT] for indices shrink request", iae.getMessage());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testShrink() throws IOException {
|
|
||||||
resizeTest(ResizeType.SHRINK, RequestConverters::shrink);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void resizeTest(ResizeType resizeType, CheckedFunction<ResizeRequest, Request, IOException> function)
|
|
||||||
throws IOException {
|
|
||||||
String[] indices = randomIndicesNames(2, 2);
|
|
||||||
ResizeRequest resizeRequest = new ResizeRequest(indices[0], indices[1]);
|
|
||||||
resizeRequest.setResizeType(resizeType);
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomMasterTimeout(resizeRequest, expectedParams);
|
|
||||||
setRandomTimeout(resizeRequest::timeout, resizeRequest.timeout(), expectedParams);
|
|
||||||
|
|
||||||
if (randomBoolean()) {
|
|
||||||
CreateIndexRequest createIndexRequest = new CreateIndexRequest(randomAlphaOfLengthBetween(3, 10));
|
|
||||||
if (randomBoolean()) {
|
|
||||||
createIndexRequest.settings(randomIndexSettings());
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
randomAliases(createIndexRequest);
|
|
||||||
}
|
|
||||||
resizeRequest.setTargetIndex(createIndexRequest);
|
|
||||||
}
|
|
||||||
setRandomWaitForActiveShards(resizeRequest::setWaitForActiveShards, expectedParams);
|
|
||||||
|
|
||||||
Request request = function.apply(resizeRequest);
|
|
||||||
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
|
||||||
String expectedEndpoint = "/" + resizeRequest.getSourceIndex() + "/_" + resizeType.name().toLowerCase(Locale.ROOT) + "/"
|
|
||||||
+ resizeRequest.getTargetIndexRequest().index();
|
|
||||||
assertEquals(expectedEndpoint, request.getEndpoint());
|
|
||||||
assertEquals(expectedParams, request.getParameters());
|
|
||||||
assertToXContentBody(resizeRequest, request.getEntity());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testRollover() throws IOException {
|
|
||||||
RolloverRequest rolloverRequest = new RolloverRequest(randomAlphaOfLengthBetween(3, 10),
|
|
||||||
randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10));
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams);
|
|
||||||
setRandomMasterTimeout(rolloverRequest, expectedParams);
|
|
||||||
if (randomBoolean()) {
|
|
||||||
rolloverRequest.dryRun(randomBoolean());
|
|
||||||
if (rolloverRequest.isDryRun()) {
|
|
||||||
expectedParams.put("dry_run", "true");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
rolloverRequest.addMaxIndexAgeCondition(new TimeValue(randomNonNegativeLong()));
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
String type = randomAlphaOfLengthBetween(3, 10);
|
|
||||||
rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type));
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest());
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
rolloverRequest.getCreateIndexRequest().settings(RandomCreateIndexGenerator.randomIndexSettings());
|
|
||||||
}
|
|
||||||
setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams);
|
|
||||||
|
|
||||||
Request request = RequestConverters.rollover(rolloverRequest);
|
|
||||||
if (rolloverRequest.getNewIndexName() == null) {
|
|
||||||
assertEquals("/" + rolloverRequest.getAlias() + "/_rollover", request.getEndpoint());
|
|
||||||
} else {
|
|
||||||
assertEquals("/" + rolloverRequest.getAlias() + "/_rollover/" + rolloverRequest.getNewIndexName(), request.getEndpoint());
|
|
||||||
}
|
|
||||||
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
|
||||||
assertToXContentBody(rolloverRequest, request.getEntity());
|
|
||||||
assertEquals(expectedParams, request.getParameters());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testGetAlias() {
|
|
||||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
|
|
||||||
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomLocal(getAliasesRequest, expectedParams);
|
|
||||||
setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions, expectedParams);
|
|
||||||
|
|
||||||
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 2);
|
|
||||||
String[] aliases = randomBoolean() ? null : randomIndicesNames(0, 2);
|
|
||||||
getAliasesRequest.indices(indices);
|
|
||||||
getAliasesRequest.aliases(aliases);
|
|
||||||
|
|
||||||
Request request = RequestConverters.getAlias(getAliasesRequest);
|
|
||||||
StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");
|
|
||||||
|
|
||||||
if (false == CollectionUtils.isEmpty(indices)) {
|
|
||||||
expectedEndpoint.add(String.join(",", indices));
|
|
||||||
}
|
|
||||||
expectedEndpoint.add("_alias");
|
|
||||||
|
|
||||||
if (false == CollectionUtils.isEmpty(aliases)) {
|
|
||||||
expectedEndpoint.add(String.join(",", aliases));
|
|
||||||
}
|
|
||||||
|
|
||||||
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
|
||||||
assertEquals(expectedEndpoint.toString(), request.getEndpoint());
|
|
||||||
assertEquals(expectedParams, request.getParameters());
|
|
||||||
assertNull(request.getEntity());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testIndexPutSettings() throws IOException {
|
|
||||||
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 2);
|
|
||||||
UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices);
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomMasterTimeout(updateSettingsRequest, expectedParams);
|
|
||||||
setRandomTimeout(updateSettingsRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
|
|
||||||
setRandomIndicesOptions(updateSettingsRequest::indicesOptions, updateSettingsRequest::indicesOptions, expectedParams);
|
|
||||||
if (randomBoolean()) {
|
|
||||||
updateSettingsRequest.setPreserveExisting(randomBoolean());
|
|
||||||
if (updateSettingsRequest.isPreserveExisting()) {
|
|
||||||
expectedParams.put("preserve_existing", "true");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Request request = RequestConverters.indexPutSettings(updateSettingsRequest);
|
|
||||||
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
|
||||||
if (indices != null && indices.length > 0) {
|
|
||||||
endpoint.add(String.join(",", indices));
|
|
||||||
}
|
|
||||||
endpoint.add("_settings");
|
|
||||||
assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
|
|
||||||
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
|
||||||
assertToXContentBody(updateSettingsRequest, request.getEntity());
|
|
||||||
assertEquals(expectedParams, request.getParameters());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testPutTemplateRequest() throws Exception {
|
|
||||||
Map<String, String> names = new HashMap<>();
|
|
||||||
names.put("log", "log");
|
|
||||||
names.put("template#1", "template%231");
|
|
||||||
names.put("-#template", "-%23template");
|
|
||||||
names.put("foo^bar", "foo%5Ebar");
|
|
||||||
|
|
||||||
PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(randomFrom(names.keySet()))
|
|
||||||
.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
|
|
||||||
if (randomBoolean()) {
|
|
||||||
putTemplateRequest.order(randomInt());
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
putTemplateRequest.version(randomInt());
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
putTemplateRequest.settings(Settings.builder().put("setting-" + randomInt(), randomTimeValue()));
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
putTemplateRequest.mapping("doc-" + randomInt(), "field-" + randomInt(), "type=" + randomFrom("text", "keyword"));
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
putTemplateRequest.alias(new Alias("alias-" + randomInt()));
|
|
||||||
}
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
if (randomBoolean()) {
|
|
||||||
expectedParams.put("create", Boolean.TRUE.toString());
|
|
||||||
putTemplateRequest.create(true);
|
|
||||||
}
|
|
||||||
if (randomBoolean()) {
|
|
||||||
String cause = randomUnicodeOfCodepointLengthBetween(1, 50);
|
|
||||||
putTemplateRequest.cause(cause);
|
|
||||||
expectedParams.put("cause", cause);
|
|
||||||
}
|
|
||||||
setRandomMasterTimeout(putTemplateRequest, expectedParams);
|
|
||||||
Request request = RequestConverters.putTemplate(putTemplateRequest);
|
|
||||||
assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name())));
|
|
||||||
assertThat(request.getParameters(), equalTo(expectedParams));
|
|
||||||
assertToXContentBody(putTemplateRequest, request.getEntity());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testValidateQuery() throws Exception {
|
|
||||||
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
|
|
||||||
String[] types = randomBoolean() ? generateRandomStringArray(5, 5, false, false) : null;
|
|
||||||
ValidateQueryRequest validateQueryRequest;
|
|
||||||
if (randomBoolean()) {
|
|
||||||
validateQueryRequest = new ValidateQueryRequest(indices);
|
|
||||||
} else {
|
|
||||||
validateQueryRequest = new ValidateQueryRequest();
|
|
||||||
validateQueryRequest.indices(indices);
|
|
||||||
}
|
|
||||||
validateQueryRequest.types(types);
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams);
|
|
||||||
validateQueryRequest.explain(randomBoolean());
|
|
||||||
validateQueryRequest.rewrite(randomBoolean());
|
|
||||||
validateQueryRequest.allShards(randomBoolean());
|
|
||||||
expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain()));
|
|
||||||
expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
|
|
||||||
expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards()));
|
|
||||||
Request request = RequestConverters.validateQuery(validateQueryRequest);
|
|
||||||
StringJoiner endpoint = new StringJoiner("/", "/", "");
|
|
||||||
if (indices != null && indices.length > 0) {
|
|
||||||
endpoint.add(String.join(",", indices));
|
|
||||||
if (types != null && types.length > 0) {
|
|
||||||
endpoint.add(String.join(",", types));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
endpoint.add("_validate/query");
|
|
||||||
assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
|
|
||||||
assertThat(request.getParameters(), equalTo(expectedParams));
|
|
||||||
assertToXContentBody(validateQueryRequest, request.getEntity());
|
|
||||||
assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testGetTemplateRequest() throws Exception {
|
|
||||||
Map<String, String> encodes = new HashMap<>();
|
|
||||||
encodes.put("log", "log");
|
|
||||||
encodes.put("1", "1");
|
|
||||||
encodes.put("template#1", "template%231");
|
|
||||||
encodes.put("template-*", "template-*");
|
|
||||||
encodes.put("foo^bar", "foo%5Ebar");
|
|
||||||
List<String> names = randomSubsetOf(1, encodes.keySet());
|
|
||||||
GetIndexTemplatesRequest getTemplatesRequest = new GetIndexTemplatesRequest().names(names.toArray(new String[0]));
|
|
||||||
Map<String, String> expectedParams = new HashMap<>();
|
|
||||||
setRandomMasterTimeout(getTemplatesRequest, expectedParams);
|
|
||||||
setRandomLocal(getTemplatesRequest, expectedParams);
|
|
||||||
Request request = RequestConverters.getTemplates(getTemplatesRequest);
|
|
||||||
assertThat(request.getEndpoint(), equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(","))));
|
|
||||||
assertThat(request.getParameters(), equalTo(expectedParams));
|
|
||||||
assertThat(request.getEntity(), nullValue());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testPutScript() throws Exception {
|
public void testPutScript() throws Exception {
|
||||||
PutStoredScriptRequest putStoredScriptRequest = new PutStoredScriptRequest();
|
PutStoredScriptRequest putStoredScriptRequest = new PutStoredScriptRequest();
|
||||||
|
|
||||||
@ -2410,7 +1599,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void setRandomIncludeDefaults(GetIndexRequest request, Map<String, String> expectedParams) {
|
static void setRandomIncludeDefaults(GetIndexRequest request, Map<String, String> expectedParams) {
|
||||||
if (randomBoolean()) {
|
if (randomBoolean()) {
|
||||||
boolean includeDefaults = randomBoolean();
|
boolean includeDefaults = randomBoolean();
|
||||||
request.includeDefaults(includeDefaults);
|
request.includeDefaults(includeDefaults);
|
||||||
@ -2420,7 +1609,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void setRandomHumanReadable(GetIndexRequest request, Map<String, String> expectedParams) {
|
static void setRandomHumanReadable(GetIndexRequest request, Map<String, String> expectedParams) {
|
||||||
if (randomBoolean()) {
|
if (randomBoolean()) {
|
||||||
boolean humanReadable = randomBoolean();
|
boolean humanReadable = randomBoolean();
|
||||||
request.humanReadable(humanReadable);
|
request.humanReadable(humanReadable);
|
||||||
@ -2430,7 +1619,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void setRandomLocal(Consumer<Boolean> setter, Map<String, String> expectedParams) {
|
static void setRandomLocal(Consumer<Boolean> setter, Map<String, String> expectedParams) {
|
||||||
if (randomBoolean()) {
|
if (randomBoolean()) {
|
||||||
boolean local = randomBoolean();
|
boolean local = randomBoolean();
|
||||||
setter.accept(local);
|
setter.accept(local);
|
||||||
@ -2485,7 +1674,7 @@ public class RequestConvertersTests extends ESTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void setRandomWaitForActiveShards(Consumer<ActiveShardCount> setter, Map<String, String> expectedParams) {
|
static void setRandomWaitForActiveShards(Consumer<ActiveShardCount> setter, Map<String, String> expectedParams) {
|
||||||
setRandomWaitForActiveShards(setter, ActiveShardCount.DEFAULT, expectedParams);
|
setRandomWaitForActiveShards(setter, ActiveShardCount.DEFAULT, expectedParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -43,6 +43,8 @@ import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.ForecastJobResponse;
 import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
+import org.elasticsearch.client.ml.GetCalendarsResponse;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetCategoriesResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;

@@ -880,6 +882,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
             PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
             client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
+            client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);

             ForecastJobResponse forecastJobResponse = client.machineLearning().
                 forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT);
             String forecastId = forecastJobResponse.getForecastId();

@@ -1526,4 +1529,66 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
+
+    public void testGetCalendar() throws IOException, InterruptedException {
+        RestHighLevelClient client = highLevelClient();
+
+        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
+        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
+        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
+        {
+            //tag::x-pack-ml-get-calendars-request
+            GetCalendarsRequest request = new GetCalendarsRequest(); // <1>
+            //end::x-pack-ml-get-calendars-request
+
+            //tag::x-pack-ml-get-calendars-id
+            request.setCalendarId("holidays"); // <1>
+            //end::x-pack-ml-get-calendars-id
+
+            //tag::x-pack-ml-get-calendars-page
+            request.setPageParams(new PageParams(10, 20)); // <1>
+            //end::x-pack-ml-get-calendars-page
+
+            // reset page params
+            request.setPageParams(null);
+
+            //tag::x-pack-ml-get-calendars-execution
+            GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
+            //end::x-pack-ml-get-calendars-execution
+
+            // tag::x-pack-ml-get-calendars-response
+            long count = response.count(); // <1>
+            List<Calendar> calendars = response.calendars(); // <2>
+            // end::x-pack-ml-get-calendars-response
+            assertEquals(1, calendars.size());
+        }
+        {
+            GetCalendarsRequest request = new GetCalendarsRequest("holidays");
+
+            // tag::x-pack-ml-get-calendars-listener
+            ActionListener<GetCalendarsResponse> listener =
+                    new ActionListener<GetCalendarsResponse>() {
+                        @Override
+                        public void onResponse(GetCalendarsResponse getCalendarsResponse) {
+                            // <1>
+                        }
+
+                        @Override
+                        public void onFailure(Exception e) {
+                            // <2>
+                        }
+                    };
+            // end::x-pack-ml-get-calendars-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-get-calendars-execute-async
+            client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::x-pack-ml-get-calendars-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
 }
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class GetCalendarsRequestTests extends AbstractXContentTestCase<GetCalendarsRequest> {
+
+    @Override
+    protected GetCalendarsRequest createTestInstance() {
+        GetCalendarsRequest request = new GetCalendarsRequest();
+        request.setCalendarId(randomAlphaOfLength(9));
+        if (randomBoolean()) {
+            request.setPageParams(new PageParams(1, 2));
+        }
+        return request;
+    }
+
+    @Override
+    protected GetCalendarsRequest doParseInstance(XContentParser parser) {
+        return GetCalendarsRequest.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.calendars.Calendar;
+import org.elasticsearch.client.ml.calendars.CalendarTests;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetCalendarsResponseTests extends AbstractXContentTestCase<GetCalendarsResponse> {
+
+    @Override
+    protected GetCalendarsResponse createTestInstance() {
+        List<Calendar> calendars = new ArrayList<>();
+        int count = randomIntBetween(0, 3);
+        for (int i=0; i<count; i++) {
+            calendars.add(CalendarTests.testInstance());
+        }
+        return new GetCalendarsResponse(calendars, count);
+    }
+
+    @Override
+    protected GetCalendarsResponse doParseInstance(XContentParser parser) throws IOException {
+        return GetCalendarsResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+}
@@ -56,8 +56,6 @@ integTestCluster {

   // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults
   systemProperty 'es.scripting.update.ctx_in_params', 'false'
-  //TODO: remove this once the cname is prepended to the address by default in 7.0
-  systemProperty 'es.http.cname_in_publish_address', 'true'
 }

 // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed
docs/java-rest/high-level/ml/get-calendars.asciidoc (new file, 83 lines)
@@ -0,0 +1,83 @@
+[[java-rest-high-x-pack-ml-get-calendars]]
+=== Get Calendars API
+Retrieves one or more calendar objects.
+It accepts a `GetCalendarsRequest` and responds
+with a `GetCalendarsResponse` object.
+
+[[java-rest-high-x-pack-ml-get-calendars-request]]
+==== Get Calendars Request
+
+By default a `GetCalendarsRequest` with no calendar ID set will return all
+calendars. Using the literal `_all` also returns all calendars.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-request]
+--------------------------------------------------
+<1> Constructing a new request for all calendars
+
+
+==== Optional Arguments
+The following arguments are optional:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-id]
+--------------------------------------------------
+<1> Construct a request for the single calendar `holidays`
+
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-page]
+--------------------------------------------------
+<1> The page parameters `from` and `size`. `from` specifies the number of calendars to skip.
+`size` specifies the maximum number of calendars to get. Defaults to `0` and `100` respectively.
+
+[[java-rest-high-x-pack-ml-get-calendars-execution]]
+==== Execution
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execution]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-get-calendars-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execute-async]
+--------------------------------------------------
+<1> The `GetCalendarsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back with the `onResponse` method
+if the execution is successful or the `onFailure` method if the execution
+failed.
+
+A typical listener for `GetCalendarsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
+
+[[java-rest-high-x-pack-ml-get-calendars-response]]
+==== Get Calendars Response
+
+The returned `GetCalendarsResponse` contains the requested calendars:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-response]
+--------------------------------------------------
+<1> The count of calendars that were matched
+<2> The calendars retrieved
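The include-tagged regions referenced in this page resolve to the `testGetCalendar` method added to `MlClientDocumentationIT` earlier in this diff. Pulled out of the test harness, the synchronous flow amounts to roughly the following sketch (the wrapper class name is invented here, and `client` is assumed to be an already-configured `RestHighLevelClient`, as in that test):

[source,java]
--------------------------------------------------
import java.io.IOException;
import java.util.List;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.job.util.PageParams;

// Hypothetical wrapper class, purely for illustration.
public class GetCalendarsExample {

    static long fetchCalendars(RestHighLevelClient client) throws IOException {
        GetCalendarsRequest request = new GetCalendarsRequest();  // no ID set: all calendars
        request.setCalendarId("holidays");                        // optional: a single calendar
        request.setPageParams(new PageParams(10, 20));            // optional: from=10, size=20

        GetCalendarsResponse response =
                client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
        List<Calendar> calendars = response.calendars();          // the matching calendars
        return response.count();                                  // total number matched
    }
}
--------------------------------------------------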
@@ -4,7 +4,7 @@ Creates a new {ml} calendar.
 The API accepts a `PutCalendarRequest` and responds
 with a `PutCalendarResponse` object.

-[[java-rest-high-x-pack-ml-get-calendars-request]]
+[[java-rest-high-x-pack-ml-put-calendar-request]]
 ==== Put Calendar Request

 A `PutCalendarRequest` is constructed with a Calendar object
@@ -231,6 +231,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
 * <<java-rest-high-x-pack-ml-post-data>>
 * <<java-rest-high-x-pack-ml-get-influencers>>
 * <<java-rest-high-x-pack-ml-get-categories>>
+* <<java-rest-high-x-pack-ml-get-calendars>>
 * <<java-rest-high-x-pack-ml-put-calendar>>

 include::ml/put-job.asciidoc[]

@@ -252,6 +253,7 @@ include::ml/get-records.asciidoc[]
 include::ml/post-data.asciidoc[]
 include::ml/get-influencers.asciidoc[]
 include::ml/get-categories.asciidoc[]
+include::ml/get-calendars.asciidoc[]
 include::ml/put-calendar.asciidoc[]

 == Migration APIs
@@ -54,7 +54,7 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-config-basi
 --------------------------------------------------

 Preemptive Authentication can be disabled, which means that every request will be sent without
-authorization headers to see if it is accepted and, upon receiving a HTTP 401 response, it will
+authorization headers to see if it is accepted and, upon receiving an HTTP 401 response, it will
 resend the exact same request with the basic authentication header. If you wish to do this, then
 you can do so by disabling it via the `HttpAsyncClientBuilder`:
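The include at this point in the page resolves to `RestClientDocumentation.java` in the docs tests. As a minimal sketch of the pattern it demonstrates (host, port, and credentials below are placeholders, and the wrapper class is invented for illustration), disabling preemptive authentication while still supplying credentials looks roughly like this:

[source,java]
--------------------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

public class DisablePreemptiveAuthExample {

    static RestClientBuilder builder() {
        // Placeholder credentials; substitute real values.
        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("user", "password"));

        return RestClient.builder(new HttpHost("localhost", 9200))
                .setHttpClientConfigCallback(httpClientBuilder -> {
                    // Disable preemptive authentication: credentials are only sent
                    // after the server responds with a 401 challenge.
                    httpClientBuilder.disableAuthCaching();
                    return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                });
    }
}
--------------------------------------------------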
@@ -1,14 +0,0 @@
-[[discovery-file]]
-=== File-Based Discovery Plugin
-
-The functionality provided by the `discovery-file` plugin is now available in
-Elasticsearch without requiring a plugin. This plugin still exists to ensure
-backwards compatibility, but it will be removed in a future version.
-
-On installation, this plugin creates a file at
-`$ES_PATH_CONF/discovery-file/unicast_hosts.txt` that comprises comments that
-describe how to use it. It is preferable not to install this plugin and instead
-to create this file, and its containing directory, using standard tools.
-
-:plugin_name: discovery-file
-include::install_remove.asciidoc[]
@@ -21,10 +21,6 @@ The Azure Classic discovery plugin uses the Azure Classic API for unicast discov

 The Google Compute Engine discovery plugin uses the GCE API for unicast discovery.

-<<discovery-file,File-based discovery>>::
-
-The File-based discovery plugin allows providing the unicast hosts list through a dynamically updatable file.
-
 [float]
 ==== Community contributed discovery plugins

@@ -38,5 +34,3 @@ include::discovery-ec2.asciidoc[]
 include::discovery-azure-classic.asciidoc[]

 include::discovery-gce.asciidoc[]
-
-include::discovery-file.asciidoc[]
docs/plugins/mapper-annotated-text.asciidoc (new file, 328 lines)
@@ -0,0 +1,328 @@
+[[mapper-annotated-text]]
+=== Mapper Annotated Text Plugin
+
+experimental[]
+
+The mapper-annotated-text plugin provides the ability to index text that is a
+combination of free-text and special markup that is typically used to identify
+items of interest such as people or organisations (see NER or Named Entity Recognition
+tools).
+
+The Elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token
+stream at the same position as the underlying text it annotates.
+
+:plugin_name: mapper-annotated-text
+include::install_remove.asciidoc[]
+
+[[mapper-annotated-text-usage]]
+==== Using the `annotated-text` field
+
+The `annotated-text` tokenizes text content as per the more common `text` field (see
+"limitations" below) but also injects any marked-up annotation tokens directly into
+the search index:
+
+[source,js]
+--------------------------
+PUT my_index
+{
+  "mappings": {
+    "_doc": {
+      "properties": {
+        "my_field": {
+          "type": "annotated_text"
+        }
+      }
+    }
+  }
+}
+--------------------------
+// CONSOLE
+
+Such a mapping would allow marked-up text, e.g. Wikipedia articles, to be indexed as both text
+and structured tokens. The annotations use a markdown-like syntax using URL encoding of
+one or more values separated by the `&` symbol.
+
+We can use the `_analyze` API to test how an example annotation would be stored as tokens
+in the search index:
+
+[source,js]
+--------------------------
+GET my_index/_analyze
+{
+  "field": "my_field",
+  "text":"Investors in [Apple](Apple+Inc.) rejoiced."
+}
+--------------------------
+// NOTCONSOLE
+
+Response:
+
+[source,js]
+--------------------------------------------------
+{
+  "tokens": [
+    {
+      "token": "investors",
+      "start_offset": 0,
+      "end_offset": 9,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "in",
+      "start_offset": 10,
+      "end_offset": 12,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "Apple Inc.", <1>
+      "start_offset": 13,
+      "end_offset": 18,
+      "type": "annotation",
+      "position": 2
+    },
+    {
+      "token": "apple",
+      "start_offset": 13,
+      "end_offset": 18,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "rejoiced",
+      "start_offset": 19,
+      "end_offset": 27,
+      "type": "<ALPHANUM>",
+      "position": 3
+    }
+  ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+<1> Note the whole annotation token `Apple Inc.` is placed, unchanged, as a single token in
+the token stream and at the same position (position 2) as the text token (`apple`) it annotates.
+
+We can now perform searches for annotations using regular `term` queries that don't tokenize
+the provided search values. Annotations are a more precise way of matching as can be seen
+in this example where a search for `Beck` will not match `Jeff Beck`:
+
+[source,js]
+--------------------------
+# Example documents
+PUT my_index/_doc/1
+{
+  "my_field": "[Beck](Beck) announced a new tour"<2>
+}
+
+PUT my_index/_doc/2
+{
+  "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<1>
+}
+
+# Example search
+GET my_index/_search
+{
+  "query": {
+    "term": {
+        "my_field": "Beck" <3>
+    }
+  }
+}
+--------------------------
+// CONSOLE
+
+<1> As well as tokenising the plain text into single words e.g. `beck`, here we
+inject the single token value `Beck` at the same position as `beck` in the token stream.
+<2> Note annotations can inject multiple tokens at the same position - here we inject both
+the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables
+broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`.
+<3> A benefit of searching with these carefully defined annotation tokens is that a query for
+`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck`.
+
+WARNING: Any use of `=` signs in annotation values e.g. `[Prince](person=Prince)` will
+cause the document to be rejected with a parse failure. In future we hope to have a use for
+the equals signs so will actively reject documents that contain this today.
+
+[[mapper-annotated-text-tips]]
+==== Data modelling tips
+===== Use structured and unstructured fields
+
+Annotations are normally a way of weaving structured information into unstructured text for
+higher-precision search.
+
+`Entity resolution` is a form of document enrichment undertaken by specialist software or people
+where references to entities in a document are disambiguated by attaching a canonical ID.
+The ID is used to resolve any number of aliases or distinguish between people with the
+same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved
+entity IDs woven into text.
+
+These IDs can be embedded as annotations in an annotated_text field but it often makes
+sense to include them in dedicated structured fields to support discovery via aggregations:
+
+[source,js]
+--------------------------
+PUT my_index
+{
+  "mappings": {
+    "_doc": {
+      "properties": {
+        "my_unstructured_text_field": {
+          "type": "annotated_text"
+        },
+        "my_structured_people_field": {
+          "type": "text",
+          "fields": {
+            "keyword" :{
+              "type": "keyword"
+            }
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------
+// CONSOLE
+
+Applications would then typically provide content and discover it as follows:
+
+[source,js]
+--------------------------
+# Example documents
+PUT my_index/_doc/1
+{
+  "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch",
+  "my_twitter_handles": ["@kimchy"] <1>
+}
+
+GET my_index/_search
+{
+  "query": {
+    "query_string": {
+        "query": "elasticsearch OR logstash OR kibana",<2>
+        "default_field": "my_unstructured_text_field"
+    }
+  },
+  "aggregations": {
+    "top_people" :{
+      "significant_terms" : { <3>
+        "field" : "my_twitter_handles.keyword"
+      }
+    }
+  }
+}
+--------------------------
+// CONSOLE
+
+<1> Note the `my_twitter_handles` field contains a list of the annotation values
+also used in the unstructured text. (Note the annotated_text syntax requires escaping).
+By repeating the annotation values in a structured field this application has ensured that
+the tokens discovered in the structured field can be used for search and highlighting
+in the unstructured field.
+<2> In this example we search for documents that talk about components of the elastic stack
+<3> We use the `my_twitter_handles` field here to discover people who are significantly
+associated with the elastic stack.
+
+===== Avoiding over-matching annotations
+By design, the regular text tokens and the annotation tokens co-exist in the same indexed
+field but in rare cases this can lead to some over-matching.
+
+The value of an annotation often denotes a _named entity_ (a person, place or company).
+The tokens for these named entities are inserted untokenized, and differ from typical text
+tokens because they are normally:
+
+* Mixed case e.g. `Madonna`
+* Multiple words e.g. `Jeff Beck`
+* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy`
+
+This means, for the most part, a search for a named entity in the annotated text field will
+not have any false positives e.g. when selecting `Apple Inc.` from an aggregation result
+you can drill down to highlight uses in the text without "over matching" on any text tokens
+like the word `apple` in this context:
+
+    the apple was very juicy
+
+However, a problem arises if your named entity happens to be a single term and lower-case e.g. the
+company `elastic`. In this case, a search on the annotated text field for the token `elastic`
+may match a text document such as this:
+
+    he fired an elastic band
+
+To avoid such false matches users should consider prefixing annotation values to ensure
+they don't name clash with text tokens e.g.
+
+    [elastic](Company_elastic) released version 7.0 of the elastic stack today
+
+
+[[mapper-annotated-text-highlighter]]
+==== Using the `annotated` highlighter
+
+The `annotated-text` plugin includes a custom highlighter designed to mark up search hits
+in a way which is respectful of the original markup:
+
+[source,js]
+--------------------------
+# Example documents
+PUT my_index/_doc/1
+{
+  "my_field": "The cat sat on the [mat](sku3578)"
+}
+
+GET my_index/_search
+{
+  "query": {
+    "query_string": {
+        "query": "cats"
+    }
+  },
+  "highlight": {
+    "fields": {
+      "my_field": {
+        "type": "annotated", <1>
+        "require_field_match": false
+      }
+    }
+  }
+}
+--------------------------
+// CONSOLE
+<1> The `annotated` highlighter type is designed for use with annotated_text fields
+
+The annotated highlighter is based on the `unified` highlighter and supports the same
+settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using
+html-like markup such as `<em>cat</em>` the annotated highlighter uses the same
+markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term`
+is the key and the matched search term is the value e.g.
+
+    The [cat](_hit_term=cat) sat on the [mat](sku3578)
+
+The annotated highlighter tries to be respectful of any existing markup in the original
+text:
+
+* If the search term matches exactly the location of an existing annotation then the
+`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the
+existing annotation.
+* However, if the search term overlaps the span of an existing annotation it would break
+the markup formatting so the original annotation is removed in favour of a new annotation
+with just the search hit information in the results.
+* Any non-overlapping annotations in the original text are preserved in highlighter
+selections
+
+[[mapper-annotated-text-limitations]]
+==== Limitations
+
+The annotated_text field type supports the same mapping settings as the `text` field type
+but with the following exceptions:
+
+* No support for `fielddata` or `fielddata_frequency_filter`
+* No support for `index_prefixes` or `index_phrases` indexing
@@ -19,5 +19,13 @@ indexes the size in bytes of the original
 The mapper-murmur3 plugin allows hashes to be computed at index-time and stored
 in the index for later use with the `cardinality` aggregation.

+<<mapper-annotated-text>>::
+
+The annotated text plugin provides the ability to index text that is a
+combination of free-text and special markup that is typically used to identify
+items of interest such as people or organisations (see NER or Named Entity Recognition
+tools).
+
 include::mapper-size.asciidoc[]
 include::mapper-murmur3.asciidoc[]
+include::mapper-annotated-text.asciidoc[]
@@ -88,7 +88,7 @@ must not be contained in the `plugins` directory for the node that you are
 installing the plugin to or installation will fail.

 HTTP::
-To install a plugin from a HTTP URL:
+To install a plugin from an HTTP URL:
 +
 [source,shell]
 -----------------------------------
@@ -40,7 +40,7 @@ Resulting in:
 --------------------------------------------------
 // TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]

-The name of the aggregation (`intraday_return` above) also serves as the key by which the aggregation result can be retrieved from the returned response.
+The name of the aggregation (`hat_prices` above) also serves as the key by which the aggregation result can be retrieved from the returned response.

 ==== Script
@ -23,11 +23,11 @@ U7321H6 analysis-stempel {version} The Stempel (Polish) Analysis plugin i
 U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch.
 U7321H6 discovery-azure-classic {version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
 U7321H6 discovery-ec2 {version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
-U7321H6 discovery-file {version} Discovery file plugin enables unicast discovery from hosts stored in a file.
 U7321H6 discovery-gce {version} The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.
 U7321H6 ingest-attachment {version} Ingest processor that uses Apache Tika to extract contents
 U7321H6 ingest-geoip {version} Ingest processor that uses looksup geo data based on ip adresses using the Maxmind geo database
 U7321H6 ingest-user-agent {version} Ingest processor that extracts information from a user agent
+U7321H6 mapper-annotated-text {version} The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.
 U7321H6 mapper-murmur3 {version} The Mapper Murmur3 plugin allows to compute hashes of a field's values at index-time and to store them in the index.
 U7321H6 mapper-size {version} The Mapper Size plugin allows document to record their uncompressed size at index time.
 U7321H6 store-smb {version} The Store SMB plugin adds support for SMB stores.
@ -35,6 +35,7 @@ string:: <<text,`text`>> and <<keyword,`keyword`>>
 `completion` to provide auto-complete suggestions
 <<token-count>>:: `token_count` to count the number of tokens in a string
 {plugins}/mapper-murmur3.html[`mapper-murmur3`]:: `murmur3` to compute hashes of values at index-time and store them in the index
+{plugins}/mapper-annotated-text.html[`mapper-annotated-text`]:: `annotated-text` to index text containing special markup (typically used for identifying named entities)
 
 <<percolator>>:: Accepts queries from the query-dsl
 
@ -23,3 +23,11 @@ See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Stor
 
 * The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider<T> provider)` has been
 renamed to `requiresAnalysisSettings`
+
+==== File-based discovery plugin
+
+* This plugin has been removed since its functionality is now part of
+Elasticsearch and requires no plugin. The location of the hosts file has moved
+from `$ES_PATH_CONF/file-discovery/unicast_hosts.txt` to
+`$ES_PATH_CONF/unicast_hosts.txt`. See <<file-based-hosts-provider, the
+file-based hosts provider documentation>> for further information.
@ -96,7 +96,7 @@ and stack traces in response output. Note: When set to `false` and the `error_tr
 parameter is specified, an error will be returned; when `error_trace` is not specified, a
 simple message will be returned. Defaults to `true`
 
-|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`.
+|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before an HTTP connection is closed, defaults to `10000`.
 
 |`http.max_warning_header_count` |The maximum number of warning headers in
 client HTTP responses, defaults to unbounded.
@ -13,10 +13,13 @@ indices. You can also adjust how monitoring data is displayed.
 
 . To collect monitoring data about your {es} cluster:
 
-.. Verify that the `xpack.monitoring.enabled` and
-`xpack.monitoring.collection.enabled` settings are `true` on each node in the
-cluster. By default, data collection is disabled. For more information, see
-<<monitoring-settings>>.
+.. Verify that the `xpack.monitoring.enabled`,
+`xpack.monitoring.collection.enabled`, and
+`xpack.monitoring.elasticsearch.collection.enabled` settings are `true` on each
+node in the cluster. By default xpack.monitoring.collection.enabled is disabled
+(`false`), and that overrides xpack.monitoring.elasticsearch.collection.enabled,
+which defaults to being enabled (`true`). Both settings can be set dynamically
+at runtime. For more information, see <<monitoring-settings>>.
 
 .. Optional: Specify which indices you want to monitor.
 +
@ -16,6 +16,16 @@ monitoring data from other sources such as {kib}, Beats, and Logstash is ignored
 You can update this setting by using the
 {ref}/cluster-update-settings.html[Cluster Update Settings API].
 
+If you want to collect data from sources such as {kib}, Beats, and Logstash but
+not collect data about your {es} cluster, you can disable data collection
+just for {es}:
+
+[source,yaml]
+---------------------------------------------------
+xpack.monitoring.collection.enabled: true
+xpack.monitoring.elasticsearch.collection.enabled: false
+---------------------------------------------------
+
 If you want to separately disable a specific exporter, you can specify the
 `enabled` setting (which defaults to `true`) per exporter. For example:
 
@ -83,8 +83,8 @@ parameter, which can be set to:
 were one big field. Looks for each word in *any*
 field. See <<type-cross-fields>>.
 
-`phrase`:: Runs a `match_phrase` query on each field and combines
-the `_score` from each field. See <<type-phrase>>.
+`phrase`:: Runs a `match_phrase` query on each field and uses the `_score`
+from the best field. See <<type-phrase>>.
 
 `phrase_prefix`:: Runs a `match_phrase_prefix` query on each field and
 combines the `_score` from each field. See <<type-phrase>>.
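(Not part of this change: a minimal Java sketch of selecting the `phrase` type
through the standard query builders; the field names are illustrative.)

[source,java]
----
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

// Scores by the best-matching field's match_phrase score, per the behaviour above.
MultiMatchQueryBuilder query = QueryBuilders
        .multiMatchQuery("quick brown fox", "subject", "message")
        .type(MultiMatchQueryBuilder.Type.PHRASE);
----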
@ -79,8 +79,8 @@ next batch of results until there are no more results left to return, ie the
 `hits` array is empty.
 
 IMPORTANT: The initial search request and each subsequent scroll request each
-return a `_scroll_id`, which may change with each request -- only the most
-recent `_scroll_id` should be used.
+return a `_scroll_id`. While the `_scroll_id` may change between requests, it doesn’t
+always change — in any case, only the most recently received `_scroll_id` should be used.
 
 NOTE: If the request specifies aggregations, only the initial search response
 will contain the aggregations results.
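(Not part of this change: a minimal sketch of the rule above using the Java
high-level REST client; `client` and `searchRequest` are assumed to exist.
Always carry forward the most recently returned `_scroll_id`.)

[source,java]
----
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.unit.TimeValue;

SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
String scrollId = response.getScrollId();
while (response.getHits().getHits().length > 0) {
    SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
    scrollRequest.scroll(TimeValue.timeValueMinutes(1L));
    response = client.scroll(scrollRequest, RequestOptions.DEFAULT);
    // Use the id from the latest response; it may or may not differ from the previous one.
    scrollId = response.getScrollId();
}
----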
@ -1,7 +1,7 @@
 [[search-request-sort]]
 === Sort
 
-Allows to add one or more sort on specific fields. Each sort can be
+Allows you to add one or more sorts on specific fields. Each sort can be
 reversed as well. The sort is defined on a per field level, with special
 field name for `_score` to sort by score, and `_doc` to sort by index order.
 
@ -223,7 +223,7 @@ scripts and sorting by geo distance.
 ==== Missing Values
 
 The `missing` parameter specifies how docs which are missing
-the field should be treated: The `missing` value can be
+the sort field should be treated: The `missing` value can be
 set to `_last`, `_first`, or a custom value (that
 will be used for missing docs as the sort value).
 The default is `_last`.
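(Not part of this change: a minimal Java sketch; the `price` field and
`searchSourceBuilder` are illustrative.)

[source,java]
----
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

// Documents without a "price" value sort before all others; the default is "_last".
searchSourceBuilder.sort(SortBuilders.fieldSort("price")
        .order(SortOrder.ASC)
        .missing("_first"));
----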
@ -250,7 +250,7 @@ the `nested_filter` then a missing value is used.
 ==== Ignoring Unmapped Fields
 
 By default, the search request will fail if there is no mapping
-associated with a field. The `unmapped_type` option allows to ignore
+associated with a field. The `unmapped_type` option allows you to ignore
 fields that have no mapping and not sort by them. The value of this
 parameter is used to determine what sort values to emit. Here is an
 example of how it can be used:
@ -322,7 +322,7 @@ GET /_search
 `ignore_unmapped`::
 
 Indicates if the unmapped field should be treated as a missing value. Setting it to `true` is equivalent to specifying
-an `unmapped_type` in the field sort. The default is `false` (unmapped field are causing the search to fail).
+an `unmapped_type` in the field sort. The default is `false` (unmapped field cause the search to fail).
 
 NOTE: geo distance sorting does not support configurable missing values: the
 distance will always be considered equal to +Infinity+ when a document does not
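(Not part of this change: the equivalent from the Java API, with illustrative
names; indices lacking a `price` mapping then emit sort values as if the field
were a `long`.)

[source,java]
----
import org.elasticsearch.search.sort.SortBuilders;

searchSourceBuilder.sort(SortBuilders.fieldSort("price").unmappedType("long"));
----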
@ -66,6 +66,17 @@ option in `kibana.yml` to the same value.
 You can update this setting through the
 <<cluster-update-settings,Cluster Update Settings API>>.
 
+`xpack.monitoring.elasticsearch.collection.enabled`::
+
+Controls whether statistics about your {es} cluster should be collected. Defaults to `true`.
+This is different from xpack.monitoring.collection.enabled, which allows you to enable or disable
+all monitoring collection. However, this setting simply disables the collection of Elasticsearch
+data while still allowing other data (e.g., Kibana, Logstash, Beats, or APM Server monitoring data)
+to pass through this cluster.
++
+You can update this setting through the
+<<cluster-update-settings,Cluster Update Settings API>>.
+
 `xpack.monitoring.collection.cluster.stats.timeout`::
 
 Sets the timeout for collecting the cluster statistics. Defaults to `10s`.
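(Not part of this change: since the setting is dynamic, it can be flipped at
runtime; a minimal sketch with the Java high-level REST client, where `client`
is assumed.)

[source,java]
----
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.settings.Settings;

// Stop collecting statistics about this cluster while still routing
// monitoring data from Kibana, Logstash, Beats or APM Server through it.
ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
request.transientSettings(Settings.builder()
        .put("xpack.monitoring.elasticsearch.collection.enabled", false));
client.cluster().putSettings(request, RequestOptions.DEFAULT);
----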
@ -60,7 +60,7 @@ The maximum period of inactivity between two data packets, before the
 request is aborted.
 
 `xpack.http.max_response_size`::
-Specifies the maximum size a HTTP response is allowed to have, defaults to
+Specifies the maximum size an HTTP response is allowed to have, defaults to
 `10mb`, the maximum configurable value is `50mb`.
 
 [[ssl-notification-settings]]
@ -78,7 +78,7 @@ The roles to associate with the anonymous user. Required.
 When `true`, an HTTP 403 response is returned if the anonymous user
 does not have the appropriate permissions for the requested action. The
 user is not prompted to provide credentials to access the requested
-resource. When set to `false`, a HTTP 401 is returned and the user
+resource. When set to `false`, an HTTP 401 response is returned and the user
 can provide credentials with the appropriate permissions to gain
 access. Defaults to `true`.
 
@ -69,7 +69,7 @@ name, node name and roles to be set, in addition to memory and network settings:
 [[msi-installer-configuration]]
 image::images/msi_installer/msi_installer_configuration.png[]
 
-A list of common plugins that can be downloaded and installed as part of the installation, with the option to configure a HTTPS proxy through which to download these plugins.
+A list of common plugins that can be downloaded and installed as part of the installation, with the option to configure an HTTPS proxy through which to download these plugins.
 
 TIP: Ensure the installation machine has access to the internet and that any corporate firewalls in place are configured to allow downloads from `artifacts.elastic.co`:
 
@ -84,7 +84,7 @@ client.search index: 'my-index', body: { query: { match: { title: 'test' } } }
 The `elasticsearch` gem combines two separate Rubygems:
 
 * https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-transport[`elasticsearch-transport`]
-provides a HTTP Ruby client for connecting to the Elasticsearch cluster,
+provides an HTTP Ruby client for connecting to the Elasticsearch cluster,
 
 * https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-api[`elasticsearch-api`]
 provides a Ruby API for the Elasticsearch RESTful API.
@ -94,7 +94,7 @@ Please see their respective documentation for configuration options and technica
 Notably, the documentation and comprehensive examples for all the API methods is contained in the source,
 and available online at http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions[Rubydoc].
 
-Keep in mind, that for optimal performance, you should use a HTTP library which supports
+Keep in mind, that for optimal performance, you should use an HTTP library which supports
 persistent ("keep-alive") HTTP connections.
 
 
@ -1 +0,0 @@
-58b9db095c569b4c4da491810f14e1429878b594
@ -0,0 +1 @@
+cc072b68aac06a2fb9569ab7adce05302f130948
@ -25,7 +25,6 @@ import org.apache.lucene.expressions.SimpleBindings;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DoubleValues;
 import org.apache.lucene.search.DoubleValuesSource;
-import org.apache.lucene.search.Scorer;
 import org.elasticsearch.script.GeneralScriptException;
 import org.elasticsearch.script.SearchScript;
 
@ -42,8 +41,6 @@ class ExpressionSearchScript implements SearchScript.LeafFactory {
     final DoubleValuesSource source;
     final ReplaceableConstDoubleValueSource specialValue; // _value
     final boolean needsScores;
-    Scorer scorer;
-    int docid;
 
     ExpressionSearchScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v, boolean needsScores) {
         exprScript = e;
@ -582,7 +582,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
             String message = e.getMessage();
             assertThat(message + " should have contained failed to execute", message.contains("failed to execute"), equalTo(true));
             message = e.getCause().getMessage();
-            assertThat(message + " should have contained not supported", message.contains("not supported"), equalTo(true));
+            assertThat(message, equalTo("Failed to compile inline script [0] using lang [expression]"));
         }
     }
 
@ -61,7 +61,7 @@ public class StoredExpressionTests extends ESIntegTestCase {
             fail("update script should have been rejected");
         } catch(Exception e) {
             assertThat(e.getMessage(), containsString("failed to execute script"));
-            assertThat(e.getCause().getMessage(), containsString("scripts of type [stored], operation [update] and lang [expression] are not supported"));
+            assertThat(e.getCause().getMessage(), containsString("Failed to compile stored script [script1] using lang [expression]"));
         }
         try {
             client().prepareSearch()
@ -25,7 +25,6 @@ esplugin {
 integTestCluster {
     module project.project(':modules:mapper-extras')
     systemProperty 'es.scripting.update.ctx_in_params', 'false'
-    systemProperty 'es.http.cname_in_publish_address', 'true'
 }
 
 dependencies {
@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.nullValue;
 public class Netty4CorsTests extends ESTestCase {
 
     public void testCorsEnabledWithoutAllowOrigins() {
-        // Set up a HTTP transport with only the CORS enabled setting
+        // Set up an HTTP transport with only the CORS enabled setting
         Settings settings = Settings.builder()
             .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
             .build();
@ -57,7 +57,7 @@ public class Netty4CorsTests extends ESTestCase {
 
     public void testCorsEnabledWithAllowOrigins() {
         final String originValue = "remote-host";
-        // create a http transport with CORS enabled and allow origin configured
+        // create an HTTP transport with CORS enabled and allow origin configured
         Settings settings = Settings.builder()
             .put(SETTING_CORS_ENABLED.getKey(), true)
             .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
@ -72,7 +72,7 @@ public class Netty4CorsTests extends ESTestCase {
     public void testCorsAllowOriginWithSameHost() {
         String originValue = "remote-host";
         String host = "remote-host";
-        // create a http transport with CORS enabled
+        // create an HTTP transport with CORS enabled
         Settings settings = Settings.builder()
             .put(SETTING_CORS_ENABLED.getKey(), true)
             .build();
@ -44,7 +44,7 @@ import java.util.Collections;
 import static org.hamcrest.Matchers.is;
 
 /**
- * This test checks, if a HTTP look-alike request (starting with a HTTP method and a space)
+ * This test checks, if an HTTP look-alike request (starting with an HTTP method and a space)
  * actually returns text response instead of just dropping the connection
  */
 public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase {
@ -91,7 +91,7 @@ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase {
         socket.getOutputStream().flush();
 
         try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) {
-            assertThat(reader.readLine(), is("This is not a HTTP port"));
+            assertThat(reader.readLine(), is("This is not an HTTP port"));
         }
     }
 }
@ -1 +0,0 @@
-f009ee188453aabae77fad55aea08bc60323bb3e
@ -0,0 +1 @@
+429eb7e780c5a6e5200041a1f5b98bccd2623aaf
@ -1 +0,0 @@
-af3d2ae975e3560c1ea69222d6c46072857952ba
@ -0,0 +1 @@
+837fca1b1d7ca1dc002e53171801526644e52818
@ -1 +0,0 @@
-f17bc5e532d9dc2786a13bd577df64023d1baae1
@ -0,0 +1 @@
+1dde903172ade259cb26cbe320c25bc1d1356f89
@ -1 +0,0 @@
-7ad89d33c1cd960c91afa05b22024137fe108567
@ -0,0 +1 @@
+b6ca20e96a989e6e6706b8b7b8ad8c82d2a03576
@ -1 +0,0 @@
-3f11fb254256d74e911b953994b47e7a95915954
@ -0,0 +1 @@
+c96a2f25dea18b383423a41aca296734353d4bbd
@ -1 +0,0 @@
-b2348d140ef0c3e674cb81173f61c5e5f430facb
@ -0,0 +1 @@
+09363c5ce111d024a6da22a5ea8dbaf54d91dbd0
@ -1 +0,0 @@
-485a0c3be58a5942b4a28639f1019181ef4cd0e3
@ -0,0 +1 @@
+13c3840d49480014118de99ef6e07a9e55c50172
@ -1,61 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.elasticsearch.gradle.test.ClusterConfiguration
-import org.elasticsearch.gradle.test.ClusterFormationTasks
-import org.elasticsearch.gradle.test.NodeInfo
-
-esplugin {
-    description 'Discovery file plugin enables unicast discovery from hosts stored in a file.'
-    classname 'org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin'
-}
-
-bundlePlugin {
-    from('config/discovery-file') {
-        into 'config'
-    }
-}
-
-task setupSeedNodeAndUnicastHostsFile(type: DefaultTask) {
-    mustRunAfter(precommit)
-}
-// setup the initial cluster with one node that will serve as the seed node
-// for unicast discovery
-ClusterConfiguration config = new ClusterConfiguration(project)
-config.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
-config.clusterName = 'discovery-file-test-cluster'
-List<NodeInfo> nodes = ClusterFormationTasks.setup(project, 'initialCluster', setupSeedNodeAndUnicastHostsFile, config)
-File srcUnicastHostsFile = file('build/cluster/unicast_hosts.txt')
-
-// write the unicast_hosts.txt file to a temporary location to be used by the second cluster
-setupSeedNodeAndUnicastHostsFile.doLast {
-    // write the unicast_hosts.txt file to a temp file in the build directory
-    srcUnicastHostsFile.setText(nodes.get(0).transportUri(), 'UTF-8')
-}
-
-// second cluster, which will connect to the first via the unicast_hosts.txt file
-integTestCluster {
-    dependsOn setupSeedNodeAndUnicastHostsFile
-    clusterName = 'discovery-file-test-cluster'
-    setting 'discovery.zen.hosts_provider', 'file'
-    extraConfigFile 'discovery-file/unicast_hosts.txt', srcUnicastHostsFile
-}
-
-integTestRunner.finalizedBy ':plugins:discovery-file:initialCluster#stop'
@ -1,20 +0,0 @@
-# The unicast_hosts.txt file contains the list of unicast hosts to connect to
-# for pinging during the discovery process, when using the file-based discovery
-# mechanism. This file should contain one entry per line, where an entry is a
-# host/port combination. The host and port should be separated by a `:`. If
-# the port is left off, a default port of 9300 is assumed. For example, if the
-# cluster has three nodes that participate in the discovery process:
-# (1) 66.77.88.99 running on port 9300 (2) 66.77.88.100 running on port 9305
-# and (3) 66.77.88.99 running on port 10005, then this file should contain the
-# following text:
-#
-#10.10.10.5
-#10.10.10.6:9305
-#10.10.10.5:10005
-#
-# For IPv6 addresses, make sure to put a bracket around the host part of the address,
-# for example: [2001:cdba:0000:0000:0000:0000:3257:9652]:9301 (where 9301 is the port).
-#
-# NOTE: all lines starting with a `#` are comments, and comments must exist
-# on lines of their own (i.e. comments cannot begin in the middle of a line)
-#
@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.discovery.file;
-
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.network.NetworkService;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.discovery.zen.UnicastHostsProvider;
-import org.elasticsearch.plugins.DiscoveryPlugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.transport.TransportService;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.function.Supplier;
-
-public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
-
-    private final DeprecationLogger deprecationLogger;
-    static final String DEPRECATION_MESSAGE
-        = "File-based discovery is now built into Elasticsearch and does not require the discovery-file plugin";
-
-    public FileBasedDiscoveryPlugin(Settings settings) {
-        deprecationLogger = new DeprecationLogger(Loggers.getLogger(this.getClass(), settings));
-    }
-
-    @Override
-    public Map<String, Supplier<UnicastHostsProvider>> getZenHostsProviders(TransportService transportService,
-                                                                            NetworkService networkService) {
-        deprecationLogger.deprecated(DEPRECATION_MESSAGE);
-        return Collections.emptyMap();
-    }
-}
@ -1,13 +0,0 @@
-# Integration tests for file-based discovery
-#
-"Ensure cluster formed successfully with discovery file":
-  # make sure both nodes joined the cluster
-  - do:
-      cluster.health:
-        wait_for_nodes: 2
-
-  # make sure the cluster was formed with the correct name
-  - do:
-      cluster.state: {}
-
-  - match: { cluster_name: 'discovery-file-test-cluster' } # correct cluster name, we formed the cluster we expected to
@ -17,16 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.discovery.file;
-
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin.DEPRECATION_MESSAGE;
-
-public class FileBasedDiscoveryPluginDeprecationTests extends ESTestCase {
-    public void testDeprecationWarning() {
-        new FileBasedDiscoveryPlugin(Settings.EMPTY).getZenHostsProviders(null, null);
-        assertWarnings(DEPRECATION_MESSAGE);
-    }
-}
+esplugin {
+    description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.'
+    classname 'org.elasticsearch.plugin.mapper.AnnotatedTextPlugin'
+}
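(Not part of this change: a minimal sketch of using the new field type once the
plugin is installed; index and field names are illustrative, and `client` is an
assumed high-level REST client.)

[source,java]
----
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.xcontent.XContentType;

CreateIndexRequest request = new CreateIndexRequest("my_index");
request.mapping("_doc",
        "{\"properties\":{\"my_field\":{\"type\":\"annotated_text\"}}}",
        XContentType.JSON);
client.indices().create(request, RequestOptions.DEFAULT);
----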
@ -0,0 +1,776 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.index.mapper.annotatedtext;
|
||||||
|
|
||||||
|
import org.apache.lucene.analysis.Analyzer;
|
||||||
|
import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
|
||||||
|
import org.apache.lucene.analysis.AnalyzerWrapper;
|
||||||
|
import org.apache.lucene.analysis.TokenFilter;
|
||||||
|
import org.apache.lucene.analysis.TokenStream;
|
||||||
|
import org.apache.lucene.analysis.Tokenizer;
|
||||||
|
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
|
||||||
|
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
|
||||||
|
import org.apache.lucene.document.Field;
|
||||||
|
import org.apache.lucene.index.IndexOptions;
|
||||||
|
import org.apache.lucene.index.IndexableField;
|
||||||
|
import org.apache.lucene.index.Term;
|
||||||
|
import org.apache.lucene.search.MultiPhraseQuery;
|
||||||
|
import org.apache.lucene.search.NormsFieldExistsQuery;
|
||||||
|
import org.apache.lucene.search.PhraseQuery;
|
||||||
|
import org.apache.lucene.search.Query;
|
||||||
|
import org.apache.lucene.search.TermQuery;
|
||||||
|
import org.elasticsearch.ElasticsearchParseException;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
|
import org.elasticsearch.index.analysis.AnalyzerScope;
|
||||||
|
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||||
|
import org.elasticsearch.index.mapper.FieldMapper;
|
||||||
|
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
|
||||||
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
|
import org.elasticsearch.index.mapper.Mapper;
|
||||||
|
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||||
|
import org.elasticsearch.index.mapper.ParseContext;
|
||||||
|
import org.elasticsearch.index.mapper.StringFieldType;
|
||||||
|
import org.elasticsearch.index.mapper.TextFieldMapper;
|
||||||
|
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.Reader;
|
||||||
|
import java.io.StringReader;
|
||||||
|
import java.io.UncheckedIOException;
|
||||||
|
import java.io.UnsupportedEncodingException;
|
||||||
|
import java.net.URLDecoder;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.regex.Matcher;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
|
import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;
|
||||||
|
|
||||||
|
/** A {@link FieldMapper} for full-text fields with annotation markup e.g.
|
||||||
|
*
|
||||||
|
* "New mayor is [John Smith](type=person&value=John%20Smith) "
|
||||||
|
*
|
||||||
|
* A special Analyzer wraps the default choice of analyzer in order
|
||||||
|
* to strip the text field of annotation markup and inject the related
|
||||||
|
* entity annotation tokens as supplementary tokens at the relevant points
|
||||||
|
* in the token stream.
|
||||||
|
* This code is largely a copy of TextFieldMapper which is less than ideal -
|
||||||
|
* my attempts to subclass TextFieldMapper failed but we can revisit this.
|
||||||
|
**/
|
||||||
|
public class AnnotatedTextFieldMapper extends FieldMapper {
|
||||||
|
|
||||||
|
public static final String CONTENT_TYPE = "annotated_text";
|
||||||
|
private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
|
||||||
|
|
||||||
|
public static class Defaults {
|
||||||
|
public static final MappedFieldType FIELD_TYPE = new AnnotatedTextFieldType();
|
||||||
|
static {
|
||||||
|
FIELD_TYPE.freeze();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Builder extends FieldMapper.Builder<Builder, AnnotatedTextFieldMapper> {
|
||||||
|
|
||||||
|
private int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;
|
||||||
|
|
||||||
|
public Builder(String name) {
|
||||||
|
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
|
||||||
|
builder = this;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AnnotatedTextFieldType fieldType() {
|
||||||
|
return (AnnotatedTextFieldType) super.fieldType();
|
||||||
|
}
|
||||||
|
|
||||||
|
public Builder positionIncrementGap(int positionIncrementGap) {
|
||||||
|
if (positionIncrementGap < 0) {
|
||||||
|
throw new MapperParsingException("[positions_increment_gap] must be positive, got " + positionIncrementGap);
|
||||||
|
}
|
||||||
|
this.positionIncrementGap = positionIncrementGap;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Builder docValues(boolean docValues) {
|
||||||
|
if (docValues) {
|
||||||
|
throw new IllegalArgumentException("[" + CONTENT_TYPE + "] fields do not support doc values");
|
||||||
|
}
|
||||||
|
return super.docValues(docValues);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public AnnotatedTextFieldMapper build(BuilderContext context) {
|
||||||
|
if (fieldType().indexOptions() == IndexOptions.NONE ) {
|
||||||
|
throw new IllegalArgumentException("[" + CONTENT_TYPE + "] fields must be indexed");
|
||||||
|
}
|
||||||
|
if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
|
||||||
|
if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) {
|
||||||
|
throw new IllegalArgumentException("Cannot set position_increment_gap on field ["
|
||||||
|
+ name + "] without positions enabled");
|
||||||
|
}
|
||||||
|
fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap));
|
||||||
|
fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap));
|
||||||
|
fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
|
||||||
|
} else {
|
||||||
|
//Using the analyzer's default BUT need to do the same thing AnalysisRegistry.processAnalyzerFactory
|
||||||
|
// does to splice in new default of posIncGap=100 by wrapping the analyzer
|
||||||
|
if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
|
||||||
|
int overrideInc = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
|
||||||
|
fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), overrideInc));
|
||||||
|
fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), overrideInc));
|
||||||
|
fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(),overrideInc));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
setupFieldType(context);
|
||||||
|
return new AnnotatedTextFieldMapper(
|
||||||
|
name, fieldType(), defaultFieldType, positionIncrementGap,
|
||||||
|
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class TypeParser implements Mapper.TypeParser {
|
||||||
|
@Override
|
||||||
|
public Mapper.Builder<AnnotatedTextFieldMapper.Builder, AnnotatedTextFieldMapper> parse(
|
||||||
|
String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||||
|
AnnotatedTextFieldMapper.Builder builder = new AnnotatedTextFieldMapper.Builder(fieldName);
|
||||||
|
|
||||||
|
builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
|
||||||
|
builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
|
||||||
|
builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
|
||||||
|
parseTextField(builder, fieldName, node, parserContext);
|
||||||
|
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||||
|
Map.Entry<String, Object> entry = iterator.next();
|
||||||
|
String propName = entry.getKey();
|
||||||
|
Object propNode = entry.getValue();
|
||||||
|
if (propName.equals("position_increment_gap")) {
|
||||||
|
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
|
||||||
|
builder.positionIncrementGap(newPositionIncrementGap);
|
||||||
|
iterator.remove();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return builder;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses markdown-like syntax into plain text and AnnotationTokens with offsets for
|
||||||
|
* annotations found in texts
|
||||||
|
*/
|
||||||
|
public static final class AnnotatedText {
|
||||||
|
public final String textPlusMarkup;
|
||||||
|
public final String textMinusMarkup;
|
||||||
|
List<AnnotationToken> annotations;
|
||||||
|
|
||||||
|
// Format is markdown-like syntax for URLs eg:
|
||||||
|
// "New mayor is [John Smith](type=person&value=John%20Smith) "
|
||||||
|
static Pattern markdownPattern = Pattern.compile("\\[([^\\]\\[]*)\\]\\(([^\\)\\(]*)\\)");
|
||||||
|
|
||||||
|
public static AnnotatedText parse (String textPlusMarkup) {
|
||||||
|
List<AnnotationToken> annotations =new ArrayList<>();
|
||||||
|
Matcher m = markdownPattern.matcher(textPlusMarkup);
|
||||||
|
int lastPos = 0;
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
while(m.find()){
|
||||||
|
if(m.start() > lastPos){
|
||||||
|
sb.append(textPlusMarkup.substring(lastPos, m.start()));
|
||||||
|
}
|
||||||
|
|
||||||
|
int startOffset = sb.length();
|
||||||
|
int endOffset = sb.length() + m.group(1).length();
|
||||||
|
sb.append(m.group(1));
|
||||||
|
lastPos = m.end();
|
||||||
|
|
||||||
|
String[] pairs = m.group(2).split("&");
|
||||||
|
String value = null;
|
||||||
|
for (String pair : pairs) {
|
||||||
|
String[] kv = pair.split("=");
|
||||||
|
try {
|
||||||
|
if(kv.length == 2){
|
||||||
|
throw new ElasticsearchParseException("key=value pairs are not supported in annotations");
|
||||||
|
}
|
||||||
|
if(kv.length == 1) {
|
||||||
|
//Check "=" sign wasn't in the pair string
|
||||||
|
if(kv[0].length() == pair.length()) {
|
||||||
|
//untyped value
|
||||||
|
value = URLDecoder.decode(kv[0], "UTF-8");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (value!=null && value.length() > 0) {
|
||||||
|
annotations.add(new AnnotationToken(startOffset, endOffset, value));
|
||||||
|
}
|
||||||
|
} catch (UnsupportedEncodingException uee){
|
||||||
|
throw new ElasticsearchParseException("Unsupported encoding parsing annotated text", uee);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if(lastPos < textPlusMarkup.length()){
|
||||||
|
sb.append(textPlusMarkup.substring(lastPos));
|
||||||
|
}
|
||||||
|
return new AnnotatedText(sb.toString(), textPlusMarkup, annotations);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected AnnotatedText(String textMinusMarkup, String textPlusMarkup, List<AnnotationToken> annotations) {
|
||||||
|
this.textMinusMarkup = textMinusMarkup;
|
||||||
|
this.textPlusMarkup = textPlusMarkup;
|
||||||
|
this.annotations = annotations;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static final class AnnotationToken {
|
||||||
|
public final int offset;
|
||||||
|
public final int endOffset;
|
||||||
|
|
||||||
|
public final String value;
|
||||||
|
public AnnotationToken(int offset, int endOffset, String value) {
|
||||||
|
this.offset = offset;
|
||||||
|
this.endOffset = endOffset;
|
||||||
|
this.value = value;
|
||||||
|
}
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return value +" ("+offset+" - "+endOffset+")";
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean intersects(int start, int end) {
|
||||||
|
return (start <= offset && end >= offset) || (start <= endOffset && end >= endOffset)
|
||||||
|
|| (start >= offset && end <= endOffset);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
final int prime = 31;
|
||||||
|
int result = 1;
|
||||||
|
result = prime * result + endOffset;
|
||||||
|
result = prime * result + offset;
|
||||||
|
result = prime * result + Objects.hashCode(value);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object obj) {
|
||||||
|
if (this == obj)
|
||||||
|
return true;
|
||||||
|
if (obj == null)
|
||||||
|
return false;
|
||||||
|
if (getClass() != obj.getClass())
|
||||||
|
return false;
|
||||||
|
AnnotationToken other = (AnnotationToken) obj;
|
||||||
|
return Objects.equals(endOffset, other.endOffset) && Objects.equals(offset, other.offset)
|
||||||
|
&& Objects.equals(value, other.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
sb.append(textMinusMarkup);
|
||||||
|
sb.append("\n");
|
||||||
|
annotations.forEach(a -> {sb.append(a); sb.append("\n");});
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
public int numAnnotations() {
|
||||||
|
return annotations.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
public AnnotationToken getAnnotation(int index) {
|
||||||
|
return annotations.get(index);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// A utility class for use with highlighters where the content being highlighted
|
||||||
|
// needs plain text format for highlighting but marked-up format for token discovery.
|
||||||
|
// The class takes markedup format field values and returns plain text versions.
|
||||||
|
// When asked to tokenize plain-text versions by the highlighter it tokenizes the
|
||||||
|
// original markup form in order to inject annotations.
|
||||||
|
public static final class AnnotatedHighlighterAnalyzer extends AnalyzerWrapper {
|
||||||
|
private Analyzer delegate;
|
||||||
|
private AnnotatedText[] annotations;
|
||||||
|
public AnnotatedHighlighterAnalyzer(Analyzer delegate){
|
||||||
|
super(delegate.getReuseStrategy());
|
||||||
|
this.delegate = delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void init(String[] markedUpFieldValues) {
|
||||||
|
this.annotations = new AnnotatedText[markedUpFieldValues.length];
|
||||||
|
for (int i = 0; i < markedUpFieldValues.length; i++) {
|
||||||
|
annotations[i] = AnnotatedText.parse(markedUpFieldValues[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String [] getPlainTextValuesForHighlighter(){
|
||||||
|
String [] result = new String[annotations.length];
|
||||||
|
for (int i = 0; i < annotations.length; i++) {
|
||||||
|
result[i] = annotations[i].textMinusMarkup;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public AnnotationToken[] getIntersectingAnnotations(int start, int end) {
|
||||||
|
List<AnnotationToken> intersectingAnnotations = new ArrayList<>();
|
||||||
|
int fieldValueOffset =0;
|
||||||
|
for (AnnotatedText fieldValueAnnotations : this.annotations) {
|
||||||
|
//This is called from a highlighter where all of the field values are concatenated
|
||||||
|
// so each annotation offset will need to be adjusted so that it takes into account
|
||||||
|
// the previous values AND the MULTIVAL delimiter
|
||||||
|
for (AnnotationToken token : fieldValueAnnotations.annotations) {
|
||||||
|
if(token.intersects(start - fieldValueOffset , end - fieldValueOffset)) {
|
||||||
|
intersectingAnnotations.add(new AnnotationToken(token.offset + fieldValueOffset,
|
||||||
|
token.endOffset + fieldValueOffset, token.value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//add 1 for the fieldvalue separator character
|
||||||
|
fieldValueOffset +=fieldValueAnnotations.textMinusMarkup.length() +1;
|
||||||
|
}
|
||||||
|
return intersectingAnnotations.toArray(new AnnotationToken[intersectingAnnotations.size()]);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Analyzer getWrappedAnalyzer(String fieldName) {
|
||||||
|
return delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
|
||||||
|
if(components instanceof AnnotatedHighlighterTokenStreamComponents){
|
||||||
|
// already wrapped.
|
||||||
|
return components;
|
||||||
|
}
|
||||||
|
AnnotationsInjector injector = new AnnotationsInjector(components.getTokenStream());
|
||||||
|
return new AnnotatedHighlighterTokenStreamComponents(components.getTokenizer(), injector, this.annotations);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
private static final class AnnotatedHighlighterTokenStreamComponents extends TokenStreamComponents{
|
||||||
|
|
||||||
|
private AnnotationsInjector annotationsInjector;
|
||||||
|
private AnnotatedText[] annotations;
|
||||||
|
int readerNum = 0;
|
||||||
|
|
||||||
|
AnnotatedHighlighterTokenStreamComponents(Tokenizer source, AnnotationsInjector annotationsFilter,
|
||||||
|
AnnotatedText[] annotations) {
|
||||||
|
super(source, annotationsFilter);
|
||||||
|
this.annotationsInjector = annotationsFilter;
|
||||||
|
this.annotations = annotations;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void setReader(Reader reader) {
|
||||||
|
String plainText = readToString(reader);
|
||||||
|
AnnotatedText at = this.annotations[readerNum++];
|
||||||
|
assert at.textMinusMarkup.equals(plainText);
|
||||||
|
// This code is reliant on the behaviour of highlighter logic - it
|
||||||
|
// takes plain text multi-value fields and then calls the same analyzer
|
||||||
|
// for each field value in turn. This class has cached the annotations
|
||||||
|
// associated with each plain-text value and are arranged in the same order
|
||||||
|
annotationsInjector.setAnnotations(at);
|
||||||
|
super.setReader(new StringReader(at.textMinusMarkup));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public static final class AnnotationAnalyzerWrapper extends AnalyzerWrapper {
|
||||||
|
|
||||||
|
|
||||||
|
private final Analyzer delegate;
|
||||||
|
|
||||||
|
public AnnotationAnalyzerWrapper (Analyzer delegate) {
|
||||||
|
super(delegate.getReuseStrategy());
|
||||||
|
this.delegate = delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps {@link StandardAnalyzer}.
|
||||||
|
*/
|
||||||
|
public AnnotationAnalyzerWrapper() {
|
||||||
|
this(new StandardAnalyzer());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Analyzer getWrappedAnalyzer(String fieldName) {
|
||||||
|
return delegate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
|
||||||
|
if(components instanceof AnnotatedTokenStreamComponents){
|
||||||
|
// already wrapped.
|
||||||
|
return components;
|
||||||
|
}
|
||||||
|
AnnotationsInjector injector = new AnnotationsInjector(components.getTokenStream());
|
||||||
|
return new AnnotatedTokenStreamComponents(components.getTokenizer(), injector);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//This Analyzer is not "wrappable" because of a limitation in Lucene https://issues.apache.org/jira/browse/LUCENE-8352
|
||||||
|
private static final class AnnotatedTokenStreamComponents extends TokenStreamComponents{
|
||||||
|
private AnnotationsInjector annotationsInjector;
|
||||||
|
|
||||||
|
AnnotatedTokenStreamComponents(Tokenizer source, AnnotationsInjector annotationsInjector) {
|
||||||
|
super(source, annotationsInjector);
|
||||||
|
this.annotationsInjector = annotationsInjector;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void setReader(Reader reader) {
|
||||||
|
// Sneaky code to change the content downstream components will parse.
|
||||||
|
// Replace the marked-up content Reader with a plain text Reader and prime the
|
||||||
|
// annotations injector with the AnnotatedTokens that need to be injected
|
||||||
|
// as plain-text parsing progresses.
|
||||||
|
AnnotatedText annotations = AnnotatedText.parse(readToString(reader));
|
||||||
|
annotationsInjector.setAnnotations(annotations);
|
||||||
|
super.setReader(new StringReader(annotations.textMinusMarkup));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    static String readToString(Reader reader) {
        char[] arr = new char[8 * 1024];
        StringBuilder buffer = new StringBuilder();
        int numCharsRead;
        try {
            while ((numCharsRead = reader.read(arr, 0, arr.length)) != -1) {
                buffer.append(arr, 0, numCharsRead);
            }
            reader.close();
            return buffer.toString();
        } catch (IOException e) {
            throw new UncheckedIOException("IO Error reading field content", e);
        }
    }

    public static final class AnnotationsInjector extends TokenFilter {

        private AnnotatedText annotatedText;
        AnnotatedText.AnnotationToken nextAnnotationForInjection = null;
        private int currentAnnotationIndex = 0;
        List<State> pendingStates = new ArrayList<>();
        int pendingStatePos = 0;
        boolean inputExhausted = false;

        private final OffsetAttribute textOffsetAtt = addAttribute(OffsetAttribute.class);
        private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
        private final PositionIncrementAttribute posAtt = addAttribute(PositionIncrementAttribute.class);
        private final PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class);
        private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);

        public AnnotationsInjector(TokenStream in) {
            super(in);
        }

        public void setAnnotations(AnnotatedText annotatedText) {
            this.annotatedText = annotatedText;
            currentAnnotationIndex = 0;
            if (annotatedText != null && annotatedText.numAnnotations() > 0) {
                nextAnnotationForInjection = annotatedText.getAnnotation(0);
            } else {
                nextAnnotationForInjection = null;
            }
        }

        @Override
        public void reset() throws IOException {
            pendingStates.clear();
            pendingStatePos = 0;
            inputExhausted = false;
            super.reset();
        }

        // Abstracts whether we are pulling from some pre-cached buffer of
        // text tokens or directly from the wrapped TokenStream
        private boolean internalNextToken() throws IOException {
            if (pendingStatePos < pendingStates.size()) {
                restoreState(pendingStates.get(pendingStatePos));
                pendingStatePos++;
                if (pendingStatePos >= pendingStates.size()) {
                    pendingStatePos = 0;
                    pendingStates.clear();
                }
                return true;
            }
            if (inputExhausted) {
                return false;
            }
            return input.incrementToken();
        }

        @Override
        public boolean incrementToken() throws IOException {
            if (internalNextToken()) {
                if (nextAnnotationForInjection != null) {
                    // If we are at the right point to inject an annotation...
                    if (textOffsetAtt.startOffset() >= nextAnnotationForInjection.offset) {
                        int firstSpannedTextPosInc = posAtt.getPositionIncrement();
                        int annotationPosLen = 1;

                        // Capture the text token's state for later replay - but
                        // with a zero pos increment so it is the same as the annotation
                        // that is injected before it
                        posAtt.setPositionIncrement(0);
                        pendingStates.add(captureState());

                        while (textOffsetAtt.endOffset() <= nextAnnotationForInjection.endOffset) {
                            // Buffer up all the other tokens spanned by this annotation to determine length.
                            if (input.incrementToken()) {
                                if (textOffsetAtt.endOffset() <= nextAnnotationForInjection.endOffset
                                        && textOffsetAtt.startOffset() < nextAnnotationForInjection.endOffset) {
                                    annotationPosLen += posAtt.getPositionIncrement();
                                }
                                pendingStates.add(captureState());
                            } else {
                                inputExhausted = true;
                                break;
                            }
                        }
                        emitAnnotation(firstSpannedTextPosInc, annotationPosLen);
                        return true;
                    }
                }
                return true;
            } else {
                inputExhausted = true;
                return false;
            }
        }

        private void setType(AnnotationToken token) {
            // Default annotation type - in future AnnotationTokens may contain custom type info
            typeAtt.setType("annotation");
        }

        private void emitAnnotation(int firstSpannedTextPosInc, int annotationPosLen) throws IOException {
            // Set the annotation's attributes
            posLenAtt.setPositionLength(annotationPosLen);
            textOffsetAtt.setOffset(nextAnnotationForInjection.offset, nextAnnotationForInjection.endOffset);
            setType(nextAnnotationForInjection);

            // We may have multiple annotations at this location - stack them up
            final int annotationOffset = nextAnnotationForInjection.offset;
            final AnnotatedText.AnnotationToken firstAnnotationAtThisPos = nextAnnotationForInjection;
            while (nextAnnotationForInjection != null && nextAnnotationForInjection.offset == annotationOffset) {

                setType(nextAnnotationForInjection);
                termAtt.resizeBuffer(nextAnnotationForInjection.value.length());
                termAtt.copyBuffer(nextAnnotationForInjection.value.toCharArray(), 0, nextAnnotationForInjection.value.length());

                if (nextAnnotationForInjection == firstAnnotationAtThisPos) {
                    posAtt.setPositionIncrement(firstSpannedTextPosInc);
                    // Put at the head of the queue of tokens to be emitted
                    pendingStates.add(0, captureState());
                } else {
                    posAtt.setPositionIncrement(0);
                    // Put after the head of the queue of tokens to be emitted
                    pendingStates.add(1, captureState());
                }

                // Flag the injected annotation as null to prevent re-injection.
                currentAnnotationIndex++;
                if (currentAnnotationIndex < annotatedText.numAnnotations()) {
                    nextAnnotationForInjection = annotatedText.getAnnotation(currentAnnotationIndex);
                } else {
                    nextAnnotationForInjection = null;
                }
            }
            // Now pop the first of many potential buffered tokens:
            internalNextToken();
        }

    }

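    // Worked example of the injection above (positions are those asserted in
    // testAnnotationInjection later in this diff): for the value
    // "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money" the filter emits
    // he=0, paid=1, stormy=2, daniels=3, hush=4 for the text tokens, and stacks the
    // injected tokens "Stephanie Clifford" and "Payee" at position 2 with a zero
    // position increment, so text and annotation queries match at the same spot.
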
    public static final class AnnotatedTextFieldType extends StringFieldType {

        public AnnotatedTextFieldType() {
            setTokenized(true);
        }

        protected AnnotatedTextFieldType(AnnotatedTextFieldType ref) {
            super(ref);
        }

        @Override
        public void setIndexAnalyzer(NamedAnalyzer delegate) {
            if (delegate.analyzer() instanceof AnnotationAnalyzerWrapper) {
                // Already wrapped the Analyzer with an AnnotationAnalyzer
                super.setIndexAnalyzer(delegate);
            } else {
                // Wrap the analyzer with an AnnotationAnalyzer that will inject required annotations
                super.setIndexAnalyzer(new NamedAnalyzer(delegate.name(), AnalyzerScope.INDEX,
                    new AnnotationAnalyzerWrapper(delegate.analyzer())));
            }
        }

        public AnnotatedTextFieldType clone() {
            return new AnnotatedTextFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query existsQuery(QueryShardContext context) {
            if (omitNorms()) {
                return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
            } else {
                return new NormsFieldExistsQuery(name());
            }
        }

        @Override
        public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException {
            PhraseQuery.Builder builder = new PhraseQuery.Builder();
            builder.setSlop(slop);

            TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
            PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
            int position = -1;

            stream.reset();
            while (stream.incrementToken()) {
                if (enablePosIncrements) {
                    position += posIncrAtt.getPositionIncrement();
                } else {
                    position += 1;
                }
                builder.add(new Term(field, termAtt.getBytesRef()), position);
            }

            return builder.build();
        }

        @Override
        public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException {
            MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder();
            mpqb.setSlop(slop);

            TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
            PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
            int position = -1;

            List<Term> multiTerms = new ArrayList<>();
            stream.reset();
            while (stream.incrementToken()) {
                int positionIncrement = posIncrAtt.getPositionIncrement();

                if (positionIncrement > 0 && multiTerms.size() > 0) {
                    if (enablePositionIncrements) {
                        mpqb.add(multiTerms.toArray(new Term[0]), position);
                    } else {
                        mpqb.add(multiTerms.toArray(new Term[0]));
                    }
                    multiTerms.clear();
                }
                position += positionIncrement;
                multiTerms.add(new Term(field, termAtt.getBytesRef()));
            }

            if (enablePositionIncrements) {
                mpqb.add(multiTerms.toArray(new Term[0]), position);
            } else {
                mpqb.add(multiTerms.toArray(new Term[0]));
            }
            return mpqb.build();
        }
    }

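    // For illustration: multiPhraseQuery() above groups query tokens that arrive with a
    // zero position increment into a single position. With the synonym filter configured
    // in the test setup later in this diff ("car, auto"), a hypothetical phrase query for
    // "fast car" becomes a MultiPhraseQuery matching "fast" followed by either "car" or
    // "auto" at the next position.
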
    private int positionIncrementGap;

    protected AnnotatedTextFieldMapper(String simpleName, AnnotatedTextFieldType fieldType, MappedFieldType defaultFieldType,
                                       int positionIncrementGap,
                                       Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        assert fieldType.tokenized();
        assert fieldType.hasDocValues() == false;
        this.positionIncrementGap = positionIncrementGap;
    }

    @Override
    protected AnnotatedTextFieldMapper clone() {
        return (AnnotatedTextFieldMapper) super.clone();
    }

    public int getPositionIncrementGap() {
        return this.positionIncrementGap;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        final String value;
        if (context.externalValueSet()) {
            value = context.externalValue().toString();
        } else {
            value = context.parser().textOrNull();
        }

        if (value == null) {
            return;
        }

        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            Field field = new Field(fieldType().name(), value, fieldType());
            fields.add(field);
            if (fieldType().omitNorms()) {
                createFieldNamesField(context, fields);
            }
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public AnnotatedTextFieldType fieldType() {
        return (AnnotatedTextFieldType) super.fieldType();
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        doXContentAnalyzers(builder, includeDefaults);

        if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
            builder.field("position_increment_gap", positionIncrementGap);
        }
    }
}
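// For illustration (behaviour asserted in testPositionIncrementGap later in this diff):
// a multi-value field such as ["a", "b"] is indexed with position_increment_gap empty
// positions between values, so "b" lands at position gap + 1 and phrase queries cannot
// accidentally match across adjacent values.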
@ -0,0 +1,44 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.mapper;

import java.util.Collections;
import java.util.Map;

import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.fetch.subphase.highlight.AnnotatedTextHighlighter;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;

public class AnnotatedTextPlugin extends Plugin implements MapperPlugin, SearchPlugin {

    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        return Collections.singletonMap(AnnotatedTextFieldMapper.CONTENT_TYPE, new AnnotatedTextFieldMapper.TypeParser());
    }

    @Override
    public Map<String, Highlighter> getHighlighters() {
        return Collections.singletonMap(AnnotatedTextHighlighter.NAME, new AnnotatedTextHighlighter());
    }
}
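// Usage sketch (the type names are registered above; the request shape is the standard
// mapping and highlight API): with this plugin installed, a field can be mapped as
// {"type": "annotated_text"} and highlighted by requesting the "annotated" highlighter.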
@ -0,0 +1,201 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch.subphase.highlight;

import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.uhighlight.Passage;
import org.apache.lucene.search.uhighlight.PassageFormatter;
import org.apache.lucene.search.uhighlight.Snippet;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * Custom passage formatter that:
 * 1) marks up search hits in markdown-like syntax for URLs ({@link Snippet})
 * 2) injects any annotations from the original text that don't conflict with search hit highlighting
 */
public class AnnotatedPassageFormatter extends PassageFormatter {

    public static final String SEARCH_HIT_TYPE = "_hit_term";
    private final Encoder encoder;
    private AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer;

    public AnnotatedPassageFormatter(AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer, Encoder encoder) {
        this.annotatedHighlighterAnalyzer = annotatedHighlighterAnalyzer;
        this.encoder = encoder;
    }

    static class MarkupPassage {
        List<Markup> markups = new ArrayList<>();
        int lastMarkupEnd = -1;

        public void addUnlessOverlapping(Markup newMarkup) {

            // Fast exit.
            if (newMarkup.start > lastMarkupEnd) {
                markups.add(newMarkup);
                lastMarkupEnd = newMarkup.end;
                return;
            }

            // Check to see if this new markup overlaps with any prior
            int index = 0;
            for (Markup existingMarkup : markups) {
                if (existingMarkup.samePosition(newMarkup)) {
                    existingMarkup.merge(newMarkup);
                    return;
                }
                if (existingMarkup.overlaps(newMarkup)) {
                    // existing markup wins - we throw away the new markup that would span this position
                    return;
                }
                // markup list is in start offset order so we can insert at this position then shift others right
                if (existingMarkup.isAfter(newMarkup)) {
                    markups.add(index, newMarkup);
                    return;
                }
                index++;
            }
            markups.add(newMarkup);
            lastMarkupEnd = newMarkup.end;
        }

    }

    static class Markup {
        int start;
        int end;
        String metadata;

        Markup(int start, int end, String metadata) {
            super();
            this.start = start;
            this.end = end;
            this.metadata = metadata;
        }

        boolean isAfter(Markup other) {
            return start > other.end;
        }

        void merge(Markup newMarkup) {
            // metadata is key1=value&key2=value&.... syntax used for urls
            assert samePosition(newMarkup);
            metadata += "&" + newMarkup.metadata;
        }

        boolean samePosition(Markup other) {
            return this.start == other.start && this.end == other.end;
        }

        boolean overlaps(Markup other) {
            return (start <= other.start && end >= other.start)
                || (start <= other.end && end >= other.end)
                || (start >= other.start && end <= other.end);
        }

        @Override
        public String toString() {
            return "Markup [start=" + start + ", end=" + end + ", metadata=" + metadata + "]";
        }

    }

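    // For illustration: a Markup spanning offsets 0-5 overlaps one spanning 3-8, so the
    // later arrival is discarded by addUnlessOverlapping(); two Markups with identical
    // start and end offsets are merged instead, joining their metadata strings in the
    // "key1=value&key2=value" form noted above.
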
    // Merge original annotations and search hits into a single set of markups for each passage
    static MarkupPassage mergeAnnotations(AnnotationToken[] annotations, Passage passage) {
        try {
            MarkupPassage markupPassage = new MarkupPassage();

            // Add search hits first - they take precedence over any other markup
            for (int i = 0; i < passage.getNumMatches(); i++) {
                int start = passage.getMatchStarts()[i];
                int end = passage.getMatchEnds()[i];
                String searchTerm = passage.getMatchTerms()[i].utf8ToString();
                Markup markup = new Markup(start, end, SEARCH_HIT_TYPE + "=" + URLEncoder.encode(searchTerm, StandardCharsets.UTF_8.name()));
                markupPassage.addUnlessOverlapping(markup);
            }

            // Now add original text's annotations - ignoring any that might conflict with the search hits markup.
            for (AnnotationToken token : annotations) {
                int start = token.offset;
                int end = token.endOffset;
                if (start >= passage.getStartOffset() && end <= passage.getEndOffset()) {
                    String escapedValue = URLEncoder.encode(token.value, StandardCharsets.UTF_8.name());
                    Markup markup = new Markup(start, end, escapedValue);
                    markupPassage.addUnlessOverlapping(markup);
                }
            }
            return markupPassage;

        } catch (UnsupportedEncodingException e) {
            // We should always have UTF-8 support
            throw new IllegalStateException(e);
        }
    }

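    // Worked example (terms taken from the tests elsewhere in this diff): a passage
    // "He paid Stormy Daniels" with a search hit on "paid" and an annotation on
    // "Stormy Daniels" is rendered by format() below as
    //   "He [paid](_hit_term=paid) [Stormy Daniels](Stephanie+Clifford)"
    // - search-hit markup wins wherever an annotation would overlap it.
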
    @Override
    public Snippet[] format(Passage[] passages, String content) {
        Snippet[] snippets = new Snippet[passages.length];

        int pos;
        int j = 0;
        for (Passage passage : passages) {
            AnnotationToken[] annotations = annotatedHighlighterAnalyzer.getIntersectingAnnotations(passage.getStartOffset(),
                    passage.getEndOffset());
            MarkupPassage mergedMarkup = mergeAnnotations(annotations, passage);

            StringBuilder sb = new StringBuilder();
            pos = passage.getStartOffset();
            for (Markup markup : mergedMarkup.markups) {
                int start = markup.start;
                int end = markup.end;
                // it's possible to have overlapping terms
                if (start > pos) {
                    append(sb, content, pos, start);
                }
                if (end > pos) {
                    sb.append("[");
                    append(sb, content, Math.max(pos, start), end);

                    sb.append("](");
                    sb.append(markup.metadata);
                    sb.append(")");
                    pos = end;
                }
            }
            // it's possible a "term" from the analyzer could span a sentence boundary.
            append(sb, content, pos, Math.max(pos, passage.getEndOffset()));
            // we remove the paragraph separator if present at the end of the snippet (we used it as a separator between values)
            if (sb.charAt(sb.length() - 1) == HighlightUtils.PARAGRAPH_SEPARATOR) {
                sb.deleteCharAt(sb.length() - 1);
            } else if (sb.charAt(sb.length() - 1) == HighlightUtils.NULL_SEPARATOR) {
                sb.deleteCharAt(sb.length() - 1);
            }
            // and we trim the snippets too
            snippets[j++] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0);
        }
        return snippets;
    }

    private void append(StringBuilder dest, String content, int start, int end) {
        dest.append(encoder.encodeText(content.substring(start, end)));
    }
}
@ -0,0 +1,64 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch.subphase.highlight;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.uhighlight.PassageFormatter;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class AnnotatedTextHighlighter extends UnifiedHighlighter {

    public static final String NAME = "annotated";

    AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer = null;

    @Override
    protected Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) {
        annotatedHighlighterAnalyzer = new AnnotatedHighlighterAnalyzer(super.getAnalyzer(docMapper, type));
        return annotatedHighlighterAnalyzer;
    }

    // Convert the marked-up values held on-disk to plain-text versions for highlighting
    @Override
    protected List<Object> loadFieldValues(MappedFieldType fieldType, Field field, SearchContext context, HitContext hitContext)
            throws IOException {
        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext);
        String[] fieldValuesAsString = fieldValues.toArray(new String[fieldValues.size()]);
        annotatedHighlighterAnalyzer.init(fieldValuesAsString);
        return Arrays.asList((Object[]) annotatedHighlighterAnalyzer.getPlainTextValuesForHighlighter());
    }

    @Override
    protected PassageFormatter getPassageFormatter(SearchContextHighlight.Field field, Encoder encoder) {
        return new AnnotatedPassageFormatter(annotatedHighlighterAnalyzer, encoder);
    }

}
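// Usage sketch (the "annotated" type name is registered above; the request shape is the
// standard highlight API): requesting {"highlight": {"fields": {"myfield": {"type":
// "annotated"}}}} on an annotated_text field returns snippets with search hits and
// surviving annotations marked up as [text](metadata) spans.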
@ -17,24 +17,23 @@
 * under the License.
 */

-package org.elasticsearch.discovery.file;
+package org.elasticsearch.index.mapper.annotatedtext;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;

-/**
- * Integration tests to make sure the file-based discovery plugin works in a cluster.
- */
-public class FileBasedDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+public class AnnotatedTextClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

-    public FileBasedDiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+    public AnnotatedTextClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
-        return ESClientYamlSuiteTestCase.createParameters();
+        return createParameters();
    }
}

@ -0,0 +1,681 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.annotatedtext;

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugin.mapper.AnnotatedTextPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {

    IndexService indexService;
    DocumentMapperParser parser;

    @Before
    public void setup() {
        Settings settings = Settings.builder()
            .put("index.analysis.filter.mySynonyms.type", "synonym")
            .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto"))
            .put("index.analysis.analyzer.synonym.tokenizer", "standard")
            .put("index.analysis.analyzer.synonym.filter", "mySynonyms")
            // Stop filter remains in server as it is part of lucene-core
            .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
            .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
            .build();
        indexService = createIndex("test", settings);
        parser = indexService.mapperService().documentMapperParser();
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        List<Class<? extends Plugin>> classpathPlugins = new ArrayList<>();
        classpathPlugins.add(AnnotatedTextPlugin.class);
        return classpathPlugins;
    }

    protected String getFieldType() {
        return "annotated_text";
    }

    public void testAnnotationInjection() throws IOException {

        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = indexService.mapperService().merge("type",
                new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

        // Use example of typed and untyped annotations
        String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money";
        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field", annotatedText)
                        .endObject()),
                XContentType.JSON);
        ParsedDocument doc = mapper.parse(sourceToParse);

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);

        assertEquals(annotatedText, fields[0].stringValue());

        IndexShard shard = indexService.getShard(0);
        shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
                sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
        shard.refresh("test");
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
            TermsEnum terms = leaf.terms("field").iterator();

            assertTrue(terms.seekExact(new BytesRef("stormy")));
            PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(2, postings.nextPosition());

            assertTrue(terms.seekExact(new BytesRef("Stephanie Clifford")));
            postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(2, postings.nextPosition());

            assertTrue(terms.seekExact(new BytesRef("Payee")));
            postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(2, postings.nextPosition());

            assertTrue(terms.seekExact(new BytesRef("hush")));
            postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(4, postings.nextPosition());
        }
    }

    public void testToleranceForBadAnnotationMarkup() throws IOException {

        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = indexService.mapperService().merge("type",
                new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

        String annotatedText = "foo [bar](MissingEndBracket baz";
        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field", annotatedText)
                        .endObject()),
                XContentType.JSON);
        ParsedDocument doc = mapper.parse(sourceToParse);

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);

        assertEquals(annotatedText, fields[0].stringValue());

        IndexShard shard = indexService.getShard(0);
        shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
                sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
        shard.refresh("test");
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
            TermsEnum terms = leaf.terms("field").iterator();

            assertTrue(terms.seekExact(new BytesRef("foo")));
            PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(0, postings.nextPosition());

            assertTrue(terms.seekExact(new BytesRef("bar")));
            postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(1, postings.nextPosition());

            assertFalse(terms.seekExact(new BytesRef("MissingEndBracket")));
            // Bad markup means value is treated as plain text and fed through tokenisation
            assertTrue(terms.seekExact(new BytesRef("missingendbracket")));
        }
    }

    public void testAgainstTermVectorsAPI() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("tvfield").field("type", getFieldType())
                .field("term_vector", "with_positions_offsets_payloads")
                .endObject().endObject()
                .endObject().endObject());
        indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

        int max = between(3, 10);
        BulkRequestBuilder bulk = client().prepareBulk();
        for (int i = 0; i < max; i++) {
            bulk.add(client().prepareIndex("test", "type", Integer.toString(i))
                    .setSource("tvfield", "the quick [brown](Color) fox jumped over the lazy dog"));
        }
        bulk.get();

        TermVectorsRequest request = new TermVectorsRequest("test", "type", "0").termStatistics(true);

        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
        IndexService test = indicesService.indexService(resolveIndex("test"));
        IndexShard shard = test.getShardOrNull(0);
        assertThat(shard, notNullValue());
        TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
        assertEquals(1, response.getFields().size());

        Terms terms = response.getFields().terms("tvfield");
        TermsEnum iterator = terms.iterator();
        BytesRef term;
        Set<String> foundTerms = new HashSet<>();
        while ((term = iterator.next()) != null) {
            foundTerms.add(term.utf8ToString());
        }
        // Check we have both text and annotation tokens
        assertTrue(foundTerms.contains("brown"));
        assertTrue(foundTerms.contains("Color"));
        assertTrue(foundTerms.contains("fox"));
    }

    // ===== Code below copied from TextFieldMapperTests ========

    public void testDefaults() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field", "1234")
                        .endObject()),
                XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);

        assertEquals("1234", fields[0].stringValue());
        IndexableFieldType fieldType = fields[0].fieldType();
        assertThat(fieldType.omitNorms(), equalTo(false));
        assertTrue(fieldType.tokenized());
        assertFalse(fieldType.stored());
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
        assertEquals(DocValuesType.NONE, fieldType.docValuesType());
    }

    public void testEnableStore() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", getFieldType()).field("store", true).endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field", "1234")
                        .endObject()),
                XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);
        assertTrue(fields[0].fieldType().stored());
    }

    public void testDisableNorms() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field")
                .field("type", getFieldType())
                .field("norms", false)
                .endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        assertEquals(mapping, mapper.mappingSource().toString());

        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field", "1234")
                        .endObject()),
                XContentType.JSON));

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(1, fields.length);
        assertTrue(fields[0].fieldType().omitNorms());
    }

    public void testIndexOptions() throws IOException {
        Map<String, IndexOptions> supportedOptions = new HashMap<>();
        supportedOptions.put("docs", IndexOptions.DOCS);
        supportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS);
        supportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        supportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

        XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties");
        for (String option : supportedOptions.keySet()) {
            mappingBuilder.startObject(option).field("type", getFieldType()).field("index_options", option).endObject();
        }
        String mapping = Strings.toString(mappingBuilder.endObject().endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        XContentBuilder jsonDoc = XContentFactory.jsonBuilder().startObject();
        for (String option : supportedOptions.keySet()) {
            jsonDoc.field(option, "1234");
        }
        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()),
                XContentType.JSON));

        for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
            String field = entry.getKey();
            IndexOptions options = entry.getValue();
            IndexableField[] fields = doc.rootDoc().getFields(field);
            assertEquals(1, fields.length);
            assertEquals(options, fields[0].fieldType().indexOptions());
        }
    }

    public void testDefaultPositionIncrementGap() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = indexService.mapperService().merge("type",
                new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

        assertEquals(mapping, mapper.mappingSource().toString());

        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .array("field", new String[] {"a", "b"})
                        .endObject()),
                XContentType.JSON);
        ParsedDocument doc = mapper.parse(sourceToParse);

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);

        assertEquals("a", fields[0].stringValue());
        assertEquals("b", fields[1].stringValue());

        IndexShard shard = indexService.getShard(0);
        shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
                sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
        shard.refresh("test");
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
            TermsEnum terms = leaf.terms("field").iterator();
            assertTrue(terms.seekExact(new BytesRef("b")));
            PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + 1, postings.nextPosition());
        }
    }

    public void testPositionIncrementGap() throws IOException {
        final int positionIncrementGap = randomIntBetween(1, 1000);
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field")
                .field("type", getFieldType())
                .field("position_increment_gap", positionIncrementGap)
                .endObject().endObject()
                .endObject().endObject());

        DocumentMapper mapper = indexService.mapperService().merge("type",
                new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

        assertEquals(mapping, mapper.mappingSource().toString());

        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .array("field", new String[]{"a", "b"})
                        .endObject()),
                XContentType.JSON);
        ParsedDocument doc = mapper.parse(sourceToParse);

        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertEquals(2, fields.length);

        assertEquals("a", fields[0].stringValue());
        assertEquals("b", fields[1].stringValue());

        IndexShard shard = indexService.getShard(0);
        shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
                sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
        shard.refresh("test");
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
            TermsEnum terms = leaf.terms("field").iterator();
            assertTrue(terms.seekExact(new BytesRef("b")));
            PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
            assertEquals(0, postings.nextDoc());
            assertEquals(positionIncrementGap + 1, postings.nextPosition());
        }
    }

    public void testSearchAnalyzerSerialization() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "standard")
                .field("search_analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        // special case: default index analyzer
        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "default")
                .field("search_analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());

        mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());

        mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        // special case: default search analyzer
        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "keyword")
                .field("search_analyzer", "default")
                .endObject()
                .endObject().endObject().endObject());

        mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());
        mapper = parser.parse("type", new CompressedXContent(mapping));

        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
        builder.endObject();

        String mappingString = Strings.toString(builder);
        assertTrue(mappingString.contains("analyzer"));
        assertTrue(mappingString.contains("search_analyzer"));
        assertTrue(mappingString.contains("search_quote_analyzer"));
    }

    public void testSearchQuoteAnalyzerSerialization() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "standard")
                .field("search_analyzer", "standard")
                .field("search_quote_analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());

        // special case: default index/search analyzer
        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field")
                .field("type", getFieldType())
                .field("analyzer", "default")
                .field("search_analyzer", "default")
                .field("search_quote_analyzer", "keyword")
                .endObject()
                .endObject().endObject().endObject());

        mapper = parser.parse("type", new CompressedXContent(mapping));
        assertEquals(mapping, mapper.mappingSource().toString());
    }

    public void testTermVectors() throws IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field1")
                .field("type", getFieldType())
                .field("term_vector", "no")
                .endObject()
                .startObject("field2")
                .field("type", getFieldType())
                .field("term_vector", "yes")
                .endObject()
                .startObject("field3")
                .field("type", getFieldType())
                .field("term_vector", "with_offsets")
                .endObject()
                .startObject("field4")
                .field("type", getFieldType())
                .field("term_vector", "with_positions")
                .endObject()
                .startObject("field5")
                .field("type", getFieldType())
                .field("term_vector", "with_positions_offsets")
                .endObject()
                .startObject("field6")
                .field("type", getFieldType())
                .field("term_vector", "with_positions_offsets_payloads")
                .endObject()
                .endObject()
                .endObject().endObject());

        DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));

        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                .bytes(XContentFactory.jsonBuilder()
                        .startObject()
                        .field("field1", "1234")
                        .field("field2", "1234")
                        .field("field3", "1234")
                        .field("field4", "1234")
                        .field("field5", "1234")
                        .field("field6", "1234")
                        .endObject()),
                XContentType.JSON));

        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));

        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));

        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));

        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));

        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));

        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
    }

public void testNullConfigValuesFail() throws MapperParsingException, IOException {
|
||||||
|
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
|
||||||
|
.startObject("type")
|
||||||
|
.startObject("properties")
|
||||||
|
.startObject("field")
|
||||||
|
.field("type", getFieldType())
|
||||||
|
.field("analyzer", (String) null)
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject().endObject());
|
||||||
|
|
||||||
|
Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
|
||||||
|
assertEquals("[analyzer] must not have a [null] value", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testNotIndexedField() throws IOException {
|
||||||
|
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||||
|
.startObject("properties").startObject("field")
|
||||||
|
.field("type", getFieldType())
|
||||||
|
.field("index", false)
|
||||||
|
.endObject().endObject().endObject().endObject());
|
||||||
|
|
||||||
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
|
() -> parser.parse("type", new CompressedXContent(mapping)));
|
||||||
|
assertEquals("[annotated_text] fields must be indexed", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException {
|
||||||
|
for (String indexOptions : Arrays.asList("docs", "freqs")) {
|
||||||
|
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||||
|
.startObject("properties").startObject("field")
|
||||||
|
.field("type", getFieldType())
|
||||||
|
.field("index_options", indexOptions)
|
||||||
|
.field("position_increment_gap", 10)
|
||||||
|
.endObject().endObject().endObject().endObject());
|
||||||
|
|
||||||
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
|
() -> parser.parse("type", new CompressedXContent(mapping)));
|
||||||
|
assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testEmptyName() throws IOException {
|
||||||
|
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
|
||||||
|
.startObject("type")
|
||||||
|
.startObject("properties")
|
||||||
|
.startObject("")
|
||||||
|
.field("type", getFieldType())
|
||||||
|
.endObject()
|
||||||
|
.endObject()
|
||||||
|
.endObject().endObject());
|
||||||
|
|
||||||
|
// Empty name not allowed in index created after 5.0
|
||||||
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
|
() -> parser.parse("type", new CompressedXContent(mapping))
|
||||||
|
);
|
||||||
|
assertThat(e.getMessage(), containsString("name cannot be empty string"));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
@ -0,0 +1,73 @@
|
|||||||
|
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.annotatedtext;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
import org.elasticsearch.test.ESTestCase;

import java.util.List;

import static org.hamcrest.Matchers.equalTo;

public class AnnotatedTextParsingTests extends ESTestCase {

    private void checkParsing(String markup, String expectedPlainText, AnnotationToken... expectedTokens) {
        AnnotatedText at = AnnotatedText.parse(markup);
        assertEquals(expectedPlainText, at.textMinusMarkup);
        List<AnnotationToken> actualAnnotations = at.annotations;
        assertEquals(expectedTokens.length, actualAnnotations.size());
        for (int i = 0; i < expectedTokens.length; i++) {
            assertEquals(expectedTokens[i], actualAnnotations.get(i));
        }
    }

    public void testSingleValueMarkup() {
        checkParsing("foo [bar](Y)", "foo bar", new AnnotationToken(4, 7, "Y"));
    }

    public void testMultiValueMarkup() {
        checkParsing("foo [bar](Y&B)", "foo bar", new AnnotationToken(4, 7, "Y"),
            new AnnotationToken(4, 7, "B"));
    }

    public void testBlankTextAnnotation() {
        checkParsing("It sounded like this:[](theSoundOfOneHandClapping)", "It sounded like this:",
            new AnnotationToken(21, 21, "theSoundOfOneHandClapping"));
    }

    public void testMissingBracket() {
        checkParsing("[foo](MissingEndBracket bar",
            "[foo](MissingEndBracket bar", new AnnotationToken[0]);
    }

    public void testAnnotationWithType() {
        Exception expectedException = expectThrows(ElasticsearchParseException.class,
            () -> checkParsing("foo [bar](type=foo) baz", "foo bar baz", new AnnotationToken(4, 7, "noType")));
        assertThat(expectedException.getMessage(), equalTo("key=value pairs are not supported in annotations"));
    }

    public void testMissingValue() {
        checkParsing("[foo]() bar", "foo bar", new AnnotationToken[0]);
    }

}
@ -0,0 +1,185 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.highlight;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.search.uhighlight.CustomSeparatorBreakIterator;
import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter;
import org.apache.lucene.search.uhighlight.PassageFormatter;
import org.apache.lucene.search.uhighlight.Snippet;
import org.apache.lucene.search.uhighlight.SplittingBreakIterator;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotationAnalyzerWrapper;
import org.elasticsearch.search.fetch.subphase.highlight.AnnotatedPassageFormatter;
import org.elasticsearch.test.ESTestCase;

import java.net.URLEncoder;
import java.text.BreakIterator;
import java.util.Locale;

import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR;
import static org.hamcrest.CoreMatchers.equalTo;

public class AnnotatedTextHighlighterTests extends ESTestCase {

    private void assertHighlightOneDoc(String fieldName, String[] markedUpInputs,
                                       Query query, Locale locale, BreakIterator breakIterator,
                                       int noMatchSize, String[] expectedPassages) throws Exception {

        // Annotated fields wrap the usual analyzer with one that injects extra tokens
        Analyzer wrapperAnalyzer = new AnnotationAnalyzerWrapper(new StandardAnalyzer());
        AnnotatedHighlighterAnalyzer hiliteAnalyzer = new AnnotatedHighlighterAnalyzer(wrapperAnalyzer);
        hiliteAnalyzer.init(markedUpInputs);
        PassageFormatter passageFormatter = new AnnotatedPassageFormatter(hiliteAnalyzer, new DefaultEncoder());
        String[] plainTextForHighlighter = hiliteAnalyzer.getPlainTextValuesForHighlighter();

        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(wrapperAnalyzer);
        iwc.setMergePolicy(newTieredMergePolicy(random()));
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
        FieldType ft = new FieldType(TextField.TYPE_STORED);
        if (randomBoolean()) {
            ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        } else {
            ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
        }
        ft.freeze();
        Document doc = new Document();
        for (String input : markedUpInputs) {
            Field field = new Field(fieldName, "", ft);
            field.setStringValue(input);
            doc.add(field);
        }
        iw.addDocument(doc);
        DirectoryReader reader = iw.getReader();
        IndexSearcher searcher = newSearcher(reader);
        iw.close();
        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER);
        assertThat(topDocs.totalHits.value, equalTo(1L));
        String rawValue = Strings.arrayToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR));

        CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, hiliteAnalyzer, null,
            passageFormatter, locale,
            breakIterator, rawValue, noMatchSize);
        highlighter.setFieldMatcher((name) -> "text".equals(name));
        final Snippet[] snippets =
            highlighter.highlightField("text", query, topDocs.scoreDocs[0].doc, expectedPassages.length);
        assertEquals(expectedPassages.length, snippets.length);
        for (int i = 0; i < snippets.length; i++) {
            assertEquals(expectedPassages[i], snippets[i].getText());
        }
        reader.close();
        dir.close();
    }

    public void testAnnotatedTextStructuredMatch() throws Exception {
        // Check that a structured token, e.g. a URL, can be highlighted in a query
        // on marked-up content using an "annotated_text" type field.
        String url = "https://en.wikipedia.org/wiki/Key_Word_in_Context";
        String encodedUrl = URLEncoder.encode(url, "UTF-8");
        String annotatedWord = "[highlighting](" + encodedUrl + ")";
        String highlightedAnnotatedWord = "[highlighting](" + AnnotatedPassageFormatter.SEARCH_HIT_TYPE + "=" + encodedUrl + "&"
            + encodedUrl + ")";
        final String[] markedUpInputs = { "This is a test. Just a test1 " + annotatedWord + " from [annotated](bar) highlighter.",
            "This is the second " + annotatedWord + " value to perform highlighting on a longer text that gets scored lower." };

        String[] expectedPassages = {
            "This is a test. Just a test1 " + highlightedAnnotatedWord + " from [annotated](bar) highlighter.",
            "This is the second " + highlightedAnnotatedWord + " value to perform highlighting on a"
                + " longer text that gets scored lower." };
        Query query = new TermQuery(new Term("text", url));
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

    public void testAnnotatedTextOverlapsWithUnstructuredSearchTerms() throws Exception {
        final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore",
            "Donald duck is a [Disney](Disney+Inc) invention" };

        String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore",
            "[Donald](_hit_term=donald) duck is a [Disney](Disney+Inc) invention" };
        Query query = new TermQuery(new Term("text", "donald"));
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

    public void testAnnotatedTextMultiFieldWithBreakIterator() throws Exception {
        final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore. Kim shook hands with Donald",
            "Donald duck is a [Disney](Disney+Inc) invention" };
        String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore",
            "Kim shook hands with [Donald](_hit_term=donald)",
            "[Donald](_hit_term=donald) duck is a [Disney](Disney+Inc) invention" };
        Query query = new TermQuery(new Term("text", "donald"));
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        breakIterator = new SplittingBreakIterator(breakIterator, '.');
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

    public void testAnnotatedTextSingleFieldWithBreakIterator() throws Exception {
        final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore. Kim shook hands with Donald" };
        String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore",
            "Kim shook hands with [Donald](_hit_term=donald)" };
        Query query = new TermQuery(new Term("text", "donald"));
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        breakIterator = new SplittingBreakIterator(breakIterator, '.');
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

    public void testAnnotatedTextSingleFieldWithPhraseQuery() throws Exception {
        final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore",
            "Donald Jr was with Melania Trump" };
        String[] expectedPassages = { "[Donald](_hit_term=donald) [Trump](_hit_term=trump) visited Singapore" };
        Query query = new PhraseQuery("text", "donald", "trump");
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

    public void testBadAnnotation() throws Exception {
        final String[] markedUpInputs = { "Missing bracket for [Donald Trump](Donald+Trump visited Singapore" };
        String[] expectedPassages = { "Missing bracket for [Donald Trump](Donald+Trump visited [Singapore](_hit_term=singapore)" };
        Query query = new TermQuery(new Term("text", "singapore"));
        BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR);
        assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages);
    }

}
@ -0,0 +1,44 @@
# Integration tests for Mapper Annotated_text components
#

---
"annotated highlighter on annotated text":
  - skip:
      version: " - 6.99.99"
      reason: Annotated text type introduced in 7.0.0-alpha1

  - do:
      indices.create:
        index: annotated
        body:
          settings:
            number_of_shards: "1"
            number_of_replicas: "0"
          mappings:
            doc:
              properties:
                text:
                  type: annotated_text
                entityID:
                  type: keyword

  - do:
      index:
        index: annotated
        type: doc
        body:
          "text" : "The [quick brown fox](entity_3789) is brown."
          "entityID": "entity_3789"
        refresh: true

  - do:
      search:
        body: { "query" : {"term" : { "entityID" : "entity_3789" } }, "highlight" : { "type" : "annotated", "require_field_match": false, "fields" : { "text" : {} } } }

  - match: {hits.hits.0.highlight.text.0: "The [quick brown fox](_hit_term=entity_3789&entity_3789) is brown."}

  - do:
      search:
        body: { "query" : {"term" : { "text" : "quick" } }, "highlight" : { "type" : "annotated", "require_field_match": false, "fields" : { "text" : {} } } }

  - match: {hits.hits.0.highlight.text.0: "The [quick](_hit_term=quick) brown fox is brown."}
@ -218,7 +218,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
    }

    public void testCorsEnabledWithoutAllowOrigins() throws IOException {
        // Set up a HTTP transport with only the CORS enabled setting
        // Set up an HTTP transport with only the CORS enabled setting
        Settings settings = Settings.builder()
            .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
            .build();
@ -233,7 +233,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase {

    public void testCorsEnabledWithAllowOrigins() throws IOException {
        final String originValue = "remote-host";
        // create a http transport with CORS enabled and allow origin configured
        // create an HTTP transport with CORS enabled and allow origin configured
        Settings settings = Settings.builder()
            .put(SETTING_CORS_ENABLED.getKey(), true)
            .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue)
@ -252,7 +252,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
    public void testCorsAllowOriginWithSameHost() throws IOException {
        String originValue = "remote-host";
        String host = "remote-host";
        // create a http transport with CORS enabled
        // create an HTTP transport with CORS enabled
        Settings settings = Settings.builder()
            .put(SETTING_CORS_ENABLED.getKey(), true)
            .build();
@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;

/**
 * Tests that when disabling detailed errors, a request with the error_trace parameter returns a HTTP 400
 * Tests that when disabling detailed errors, a request with the error_trace parameter returns an HTTP 400 response.
 */
@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1)
public class DetailedErrorsDisabledIT extends HttpSmokeTestCase {
@ -224,10 +224,6 @@ fi
    install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar
}

@test "[$GROUP] install discovery-file plugin" {
    install_and_check_plugin discovery file
}

@test "[$GROUP] install ingest-attachment plugin" {
    # we specify the version on the poi-3.17.jar so that the test does
    # not spuriously pass if the jar is missing but the other poi jars
@ -266,6 +262,10 @@ fi
    install_and_check_plugin mapper murmur3
}

@test "[$GROUP] install annotated-text mapper plugin" {
    install_and_check_plugin mapper annotated-text
}

@test "[$GROUP] check reindex module" {
    check_module reindex
}
@ -360,10 +360,6 @@ fi
    remove_plugin discovery-ec2
}

@test "[$GROUP] remove discovery-file plugin" {
    remove_plugin discovery-file
}

@test "[$GROUP] remove ingest-attachment plugin" {
    remove_plugin ingest-attachment
}
@ -380,6 +376,10 @@ fi
    remove_plugin mapper-murmur3
}

@test "[$GROUP] remove annotated-text mapper plugin" {
    remove_plugin mapper-annotated-text
}

@test "[$GROUP] remove size mapper plugin" {
    remove_plugin mapper-size
}
@ -19,9 +19,10 @@ Test file structure
--------------------

A YAML test file consists of:
* an optional `setup` section, followed by
* an optional `teardown` section, followed by
* one or more test sections
- an optional `setup` section, followed by
- an optional `teardown` section, followed by
- one or more test sections

For instance:
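A minimal sketch of such a file, assuming the usual `do`/`match` steps; the index name, document body, and match values here are illustrative only:

....
setup:
  - do:
      indices.create:
        index: test_index

---
"Fetch a document":

  - do:
      index:
        index: test_index
        type: doc
        id: 1
        body: { "title": "hello" }
        refresh: true

  - do:
      get:
        index: test_index
        type: doc
        id: 1

  - match: { _source.title: "hello" }
....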
@ -216,11 +217,13 @@ sent to nodes that match the `node_selector`. It looks like this:

If you list multiple selectors then the request will only go to nodes that
match all of those selectors. The following selectors are supported:
* `version`: Only nodes whose version is within the range will receive the
- `version`: Only nodes whose version is within the range will receive the
  request. The syntax for the pattern is the same as when `version` is within
  `skip`.
* `attribute`: Only nodes that have an attribute matching the name and value
- `attribute`: Only nodes that have an attribute matching the name and value
  of the provided attribute match. Looks like:
  of the provided attribute match.

Looks like:
....
node_selector:
    attribute:
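For comparison, a `version` selector takes a range using the same syntax as `skip` versions; a sketch, with an illustrative range value:

....
node_selector:
    version: " - 6.99.99"
....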
@ -1 +0,0 @@
a22f1c6749ca4a3fbc9b330161a8ea3301cac8de
@ -0,0 +1 @@
dce55e44af096cb9029cb26d22a14d8a9c5223ce
@ -1 +0,0 @@
41ce415b93d75662cc2e790d09120bc0234d6b1b
@ -0,0 +1 @@
d1d941758dc91ea7c2d515dd97b5d9b23b0f1874
@ -1 +0,0 @@
06c1e4fa838807059d27aaf5405cfdfe7303369c
@ -0,0 +1 @@
e884b8ce62a2102b24bfdbe8911674cd5b0d06d9
@ -1 +0,0 @@
5b0a019a938deb58160647e7640b348bb99c10a8
@ -0,0 +1 @@
3870972c07d7fa41a3bc58eb65952da53a16a406
@ -1 +0,0 @@
4d813f3ba0ddd56bac728edb88ed8875e6acfd18
Some files were not shown because too many files have changed in this diff