Merge branch 'master' into feature/multi_cluster_search
commit 418ec62bfb
@@ -21,6 +21,7 @@ package org.elasticsearch.gradle
 import nebula.plugin.extraconfigurations.ProvidedBasePlugin
 import org.elasticsearch.gradle.precommit.PrecommitTasks
 import org.gradle.api.GradleException
+import org.gradle.api.InvalidUserDataException
 import org.gradle.api.JavaVersion
 import org.gradle.api.Plugin
 import org.gradle.api.Project

@@ -54,6 +55,11 @@ class BuildPlugin implements Plugin<Project> {
 
     @Override
     void apply(Project project) {
+        if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) {
+            throw new InvalidUserDataException('elasticsearch.standalone-test, '
+                + 'elasticearch.standalone-rest-test, and elasticsearch.build '
+                + 'are mutually exclusive')
+        }
         project.pluginManager.apply('java')
         project.pluginManager.apply('carrotsearch.randomized-testing')
         // these plugins add lots of info to our jars
@@ -30,6 +30,7 @@ public class DocsTestPlugin extends RestTestPlugin {
 
     @Override
     public void apply(Project project) {
+        project.pluginManager.apply('elasticsearch.standalone-rest-test')
         super.apply(project)
         Map<String, String> defaultSubstitutions = [
             /* These match up with the asciidoc syntax for substitutions but
@@ -18,15 +18,29 @@
  */
 package org.elasticsearch.gradle.test
 
+import org.elasticsearch.gradle.BuildPlugin
+import org.gradle.api.InvalidUserDataException
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 
-/** A plugin to add rest integration tests. Used for qa projects. */
+/**
+ * Adds support for starting an Elasticsearch cluster before running integration
+ * tests. Used in conjunction with {@link StandaloneRestTestPlugin} for qa
+ * projects and in conjunction with {@link BuildPlugin} for testing the rest
+ * client.
+ */
 public class RestTestPlugin implements Plugin<Project> {
+    List REQUIRED_PLUGINS = [
+        'elasticsearch.build',
+        'elasticsearch.standalone-rest-test']
 
     @Override
     public void apply(Project project) {
-        project.pluginManager.apply(StandaloneTestBasePlugin)
+        if (false == REQUIRED_PLUGINS.any {project.pluginManager.hasPlugin(it)}) {
+            throw new InvalidUserDataException('elasticsearch.rest-test '
+                + 'requires either elasticsearch.build or '
+                + 'elasticsearch.standalone-test')
+        }
 
         RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
         integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
@@ -24,15 +24,26 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
 import org.elasticsearch.gradle.BuildPlugin
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.precommit.PrecommitTasks
+import org.gradle.api.InvalidUserDataException
 import org.gradle.api.Plugin
 import org.gradle.api.Project
+import org.gradle.api.Task
 import org.gradle.api.plugins.JavaBasePlugin
 
-/** Configures the build to have a rest integration test. */
-public class StandaloneTestBasePlugin implements Plugin<Project> {
+/**
+ * Configures the build to compile tests against Elasticsearch's test framework
+ * and run REST tests. Use BuildPlugin if you want to build main code as well
+ * as tests.
+ */
+public class StandaloneRestTestPlugin implements Plugin<Project> {
 
     @Override
     public void apply(Project project) {
+        if (project.pluginManager.hasPlugin('elasticsearch.build')) {
+            throw new InvalidUserDataException('elasticsearch.standalone-test, '
+                + 'elasticsearch.standalone-test, and elasticsearch.build are '
+                + 'mutually exclusive')
+        }
         project.pluginManager.apply(JavaBasePlugin)
         project.pluginManager.apply(RandomizedTestingPlugin)
@@ -25,12 +25,15 @@ import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.plugins.JavaBasePlugin
 
-/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
+/**
+ * Configures the build to compile against Elasticsearch's test framework and
+ * run integration and unit tests. Use BuildPlugin if you want to build main
+ * code as well as tests. */
 public class StandaloneTestPlugin implements Plugin<Project> {
 
     @Override
     public void apply(Project project) {
-        project.pluginManager.apply(StandaloneTestBasePlugin)
+        project.pluginManager.apply(StandaloneRestTestPlugin)
 
         Map testOptions = [
             name: 'test',
@@ -0,0 +1,20 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+implementation-class=org.elasticsearch.gradle.test.StandaloneRestTestPlugin
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+apply plugin: 'elasticsearch.build'
+apply plugin: 'elasticsearch.rest-test'
+
+group = 'org.elasticsearch.client'
+
+dependencies {
+    compile "org.elasticsearch:elasticsearch:${version}"
+    compile "org.elasticsearch.client:rest:${version}"
+
+    testCompile "org.elasticsearch.client:test:${version}"
+    testCompile "org.elasticsearch.test:framework:${version}"
+    testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
+    testCompile "junit:junit:${versions.junit}"
+    testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
+}
+
+dependencyLicenses {
+    // Don't check licenses for dependency that are part of the elasticsearch project
+    // But any other dependency should have its license/notice/sha1
+    dependencies = project.configurations.runtime.fileCollection {
+        it.group.startsWith('org.elasticsearch') == false
+    }
+}
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.http.Header;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * High level REST client that wraps an instance of the low level {@link RestClient} and allows to build requests and read responses.
+ * The provided {@link RestClient} is externally built and closed.
+ */
+public final class RestHighLevelClient {
+
+    private static final Log logger = LogFactory.getLog(RestHighLevelClient.class);
+
+    private final RestClient client;
+
+    public RestHighLevelClient(RestClient client) {
+        this.client = Objects.requireNonNull(client);
+    }
+
+    public boolean ping(Header... headers) {
+        try {
+            client.performRequest("HEAD", "/", headers);
+            return true;
+        } catch(IOException exception) {
+            return false;
+        }
+    }
+
+
+}
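The new high level client above only delegates to the low level RestClient it is handed. For illustration, a minimal usage sketch — not part of this commit, and it assumes a node reachable on localhost:9200:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class PingExample {
    public static void main(String[] args) throws Exception {
        // The low level client is built and closed by the caller, exactly as
        // the RestHighLevelClient javadoc above requires.
        try (RestClient lowLevelClient = RestClient.builder(
                new HttpHost("localhost", 9200, "http")).build()) {
            RestHighLevelClient client = new RestHighLevelClient(lowLevelClient);
            // ping() sends HEAD / and turns any IOException into false.
            System.out.println("cluster reachable: " + client.ping());
        }
    }
}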
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.junit.AfterClass;
+import org.junit.Before;
+
+import java.io.IOException;
+
+public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {
+
+    private static RestHighLevelClient restHighLevelClient;
+
+    @Before
+    public void initHighLevelClient() throws IOException {
+        super.initClient();
+        if (restHighLevelClient == null) {
+            restHighLevelClient = new RestHighLevelClient(client());
+        }
+    }
+
+    @AfterClass
+    public static void cleanupClient() throws IOException {
+        restHighLevelClient = null;
+    }
+
+    protected static RestHighLevelClient highLevelClient() {
+        return restHighLevelClient;
+    }
+}
@@ -0,0 +1,27 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+public class MainActionIT extends ESRestHighLevelClientTestCase {
+
+    public void testPing() {
+        assertTrue(highLevelClient().ping());
+    }
+}
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.Header;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+import org.mockito.ArgumentMatcher;
+import org.mockito.internal.matchers.ArrayEquals;
+import org.mockito.internal.matchers.VarargMatcher;
+
+import java.io.IOException;
+import java.net.SocketTimeoutException;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class RestHighLevelClientTests extends ESTestCase {
+
+    private RestClient restClient;
+    private RestHighLevelClient restHighLevelClient;
+
+    @Before
+    public void initClient() throws IOException {
+        restClient = mock(RestClient.class);
+        restHighLevelClient = new RestHighLevelClient(restClient);
+    }
+
+    public void testPing() throws IOException {
+        assertTrue(restHighLevelClient.ping());
+        verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher()));
+    }
+
+    public void testPingFailure() throws IOException {
+        when(restClient.performRequest(any(), any())).thenThrow(new IllegalStateException());
+        expectThrows(IllegalStateException.class, () -> restHighLevelClient.ping());
+    }
+
+    public void testPingFailed() throws IOException {
+        when(restClient.performRequest(any(), any())).thenThrow(new SocketTimeoutException());
+        assertFalse(restHighLevelClient.ping());
+    }
+
+    public void testPingWithHeaders() throws IOException {
+        Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header");
+        assertTrue(restHighLevelClient.ping(headers));
+        verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher(headers)));
+    }
+
+    private class HeadersVarargMatcher extends ArgumentMatcher<Header[]> implements VarargMatcher {
+        private Header[] expectedHeaders;
+
+        HeadersVarargMatcher(Header... expectedHeaders) {
+            this.expectedHeaders = expectedHeaders;
+        }
+
+        @Override
+        public boolean matches(Object varargArgument) {
+            if (varargArgument instanceof Header[]) {
+                Header[] actualHeaders = (Header[]) varargArgument;
+                return new ArrayEquals(expectedHeaders).matches(actualHeaders);
+            }
+            return false;
+        }
+    }
+}
@@ -40,7 +40,6 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;

@@ -50,7 +49,6 @@ import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes;
 import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
 import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 /**

@@ -77,8 +75,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         }
 
         httpServer = createHttpServer();
-        int numHeaders = randomIntBetween(0, 5);
-        defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders);
+        defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
         RestClientBuilder restClientBuilder = RestClient.builder(
                 new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
         if (pathPrefix.length() > 0) {

@@ -151,17 +148,11 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         if (method.equals("HEAD") == false) {
             standardHeaders.add("Content-length");
         }
-        final int numHeaders = randomIntBetween(1, 5);
-        final Header[] headers = generateHeaders("Header", "Header-array", numHeaders);
-        final Map<String, List<String>> expectedHeaders = new HashMap<>();
-
-        addHeaders(expectedHeaders, defaultHeaders, headers);
-
+        final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
         final int statusCode = randomStatusCode(getRandom());
         Response esResponse;
         try {
-            esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), headers);
+            esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), requestHeaders);
         } catch(ResponseException e) {
             esResponse = e.getResponse();
         }

@@ -169,24 +160,13 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
         assertEquals(method, esResponse.getRequestLine().getMethod());
         assertEquals(statusCode, esResponse.getStatusLine().getStatusCode());
         assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri());
+        assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), standardHeaders);
         for (final Header responseHeader : esResponse.getHeaders()) {
-            final String name = responseHeader.getName();
-            final String value = responseHeader.getValue();
-            if (name.startsWith("Header")) {
-                final List<String> values = expectedHeaders.get(name);
-                assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
-                assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
-
-                // we've collected them all
-                if (values.isEmpty()) {
-                    expectedHeaders.remove(name);
-                }
-            } else {
+            String name = responseHeader.getName();
+            if (name.startsWith("Header") == false) {
                 assertTrue("unknown header was returned " + name, standardHeaders.remove(name));
             }
         }
-        assertTrue("some headers that were sent weren't returned: " + expectedHeaders, expectedHeaders.isEmpty());
         assertTrue("some expected standard headers weren't returned: " + standardHeaders, standardHeaders.isEmpty());
     }
 }
@@ -56,7 +56,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Future;

@@ -70,7 +69,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

@@ -131,9 +129,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
         }
         });
 
-        int numHeaders = randomIntBetween(0, 3);
-        defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders);
+        defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
         httpHost = new HttpHost("localhost", 9200);
         failureListener = new HostsTrackingFailureListener();
         restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, null, failureListener);

@@ -339,33 +335,16 @@ public class RestClientSingleHostTests extends RestClientTestCase {
      */
     public void testHeaders() throws IOException {
         for (String method : getHttpMethods()) {
-            final int numHeaders = randomIntBetween(1, 5);
-            final Header[] headers = generateHeaders("Header", null, numHeaders);
-            final Map<String, List<String>> expectedHeaders = new HashMap<>();
-
-            addHeaders(expectedHeaders, defaultHeaders, headers);
-
+            final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
             final int statusCode = randomStatusCode(getRandom());
             Response esResponse;
             try {
-                esResponse = restClient.performRequest(method, "/" + statusCode, headers);
+                esResponse = restClient.performRequest(method, "/" + statusCode, requestHeaders);
             } catch(ResponseException e) {
                 esResponse = e.getResponse();
             }
             assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode));
-            for (Header responseHeader : esResponse.getHeaders()) {
-                final String name = responseHeader.getName();
-                final String value = responseHeader.getValue();
-                final List<String> values = expectedHeaders.get(name);
-                assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
-                assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
-
-                // we've collected them all
-                if (values.isEmpty()) {
-                    expectedHeaders.remove(name);
-                }
-            }
-            assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty());
+            assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.<String>emptySet());
         }
     }

@@ -424,10 +403,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
         }
 
         Header[] headers = new Header[0];
-        final int numHeaders = randomIntBetween(1, 5);
-        final Set<String> uniqueNames = new HashSet<>(numHeaders);
+        final Set<String> uniqueNames = new HashSet<>();
         if (randomBoolean()) {
-            headers = generateHeaders("Header", "Header-array", numHeaders);
+            headers = RestClientTestUtil.randomHeaders(getRandom(), "Header");
             for (Header header : headers) {
                 request.addHeader(header);
                 uniqueNames.add(header.getName());
@@ -30,16 +30,19 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 @TestMethodProviders({
     JUnit3MethodProvider.class
 })

@@ -53,70 +56,56 @@ import java.util.Set;
 public abstract class RestClientTestCase extends RandomizedTest {
 
     /**
-     * Create the specified number of {@link Header}s.
-     * <p>
-     * Generated header names will be the {@code baseName} plus its index or, rarely, the {@code arrayName} if it's supplied.
+     * Assert that the actual headers are the expected ones given the original default and request headers. Some headers can be ignored,
+     * for instance in case the http client is adding its own automatically.
      *
-     * @param baseName The base name to use for all headers.
-     * @param arrayName The optional ({@code null}able) array name to use randomly.
-     * @param headers The number of headers to create.
-     * @return Never {@code null}.
+     * @param defaultHeaders the default headers set to the REST client instance
+     * @param requestHeaders the request headers sent with a particular request
+     * @param actualHeaders the actual headers as a result of the provided default and request headers
+     * @param ignoreHeaders header keys to be ignored as they are not part of default nor request headers, yet they
+     *                      will be part of the actual ones
      */
-    protected static Header[] generateHeaders(final String baseName, final String arrayName, final int headers) {
-        final Header[] generated = new Header[headers];
-        for (int i = 0; i < headers; i++) {
-            String headerName = baseName + i;
-            if (arrayName != null && rarely()) {
-                headerName = arrayName;
-            }
-
-            generated[i] = new BasicHeader(headerName, randomAsciiOfLengthBetween(3, 10));
-        }
-        return generated;
+    protected static void assertHeaders(final Header[] defaultHeaders, final Header[] requestHeaders,
+                                        final Header[] actualHeaders, final Set<String> ignoreHeaders) {
+        final Map<String, List<String>> expectedHeaders = new HashMap<>();
+        final Set<String> requestHeaderKeys = new HashSet<>();
+        for (final Header header : requestHeaders) {
+            final String name = header.getName();
+            addValueToListEntry(expectedHeaders, name, header.getValue());
+            requestHeaderKeys.add(name);
+        }
+        for (final Header defaultHeader : defaultHeaders) {
+            final String name = defaultHeader.getName();
+            if (requestHeaderKeys.contains(name) == false) {
+                addValueToListEntry(expectedHeaders, name, defaultHeader.getValue());
+            }
+        }
+        Set<String> actualIgnoredHeaders = new HashSet<>();
+        for (Header responseHeader : actualHeaders) {
+            final String name = responseHeader.getName();
+            if (ignoreHeaders.contains(name)) {
+                expectedHeaders.remove(name);
+                actualIgnoredHeaders.add(name);
+                continue;
+            }
+            final String value = responseHeader.getValue();
+            final List<String> values = expectedHeaders.get(name);
+            assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
+            assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
+            if (values.isEmpty()) {
+                expectedHeaders.remove(name);
+            }
+        }
+        assertEquals("some headers meant to be ignored were not part of the actual headers", ignoreHeaders, actualIgnoredHeaders);
+        assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty());
     }
 
-    /**
-     * Create a new {@link List} within the {@code map} if none exists for {@code name} or append to the existing list.
-     *
-     * @param map The map to manipulate.
-     * @param name The name to create/append the list for.
-     * @param value The value to add.
-     */
-    private static void createOrAppendList(final Map<String, List<String>> map, final String name, final String value) {
+    private static void addValueToListEntry(final Map<String, List<String>> map, final String name, final String value) {
         List<String> values = map.get(name);
 
         if (values == null) {
             values = new ArrayList<>();
             map.put(name, values);
         }
 
         values.add(value);
     }
-
-    /**
-     * Add the {@code headers} to the {@code map} so that related tests can more easily assert that they exist.
-     * <p>
-     * If both the {@code defaultHeaders} and {@code headers} contain the same {@link Header}, based on its
-     * {@linkplain Header#getName() name}, then this will only use the {@code Header}(s) from {@code headers}.
-     *
-     * @param map The map to build with name/value(s) pairs.
-     * @param defaultHeaders The headers to add to the map representing default headers.
-     * @param headers The headers to add to the map representing request-level headers.
-     * @see #createOrAppendList(Map, String, String)
-     */
-    protected static void addHeaders(final Map<String, List<String>> map, final Header[] defaultHeaders, final Header[] headers) {
-        final Set<String> uniqueHeaders = new HashSet<>();
-        for (final Header header : headers) {
-            final String name = header.getName();
-            createOrAppendList(map, name, header.getValue());
-            uniqueHeaders.add(name);
-        }
-        for (final Header defaultHeader : defaultHeaders) {
-            final String name = defaultHeader.getName();
-            if (uniqueHeaders.contains(name) == false) {
-                createOrAppendList(map, name, defaultHeader.getValue());
-            }
-        }
-    }
-
 }
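The precedence rule assertHeaders encodes is worth spelling out: a request-level header replaces every default header with the same name, while default headers with unclaimed names are still expected. A hypothetical subclass showing that expectation — illustrative only, not code from this commit:

import java.util.Collections;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;

public class AssertHeadersExampleTests extends RestClientTestCase {
    public void testRequestHeaderWinsOverDefault() {
        Header[] defaults = new Header[]{new BasicHeader("Header-user", "default"),
                new BasicHeader("Header-extra", "kept")};
        Header[] request = new Header[]{new BasicHeader("Header-user", "override")};
        // Expected set: {Header-user: override, Header-extra: kept}; a response
        // echoing "Header-user: default" would fail the assertion.
        Header[] actual = new Header[]{new BasicHeader("Header-user", "override"),
                new BasicHeader("Header-extra", "kept")};
        assertHeaders(defaults, request, actual, Collections.<String>emptySet());
    }
}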
@@ -19,7 +19,11 @@
 
 package org.elasticsearch.client;
 
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
 
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -81,4 +85,23 @@ final class RestClientTestUtil {
     static List<Integer> getAllStatusCodes() {
         return ALL_STATUS_CODES;
     }
+
+    /**
+     * Create a random number of {@link Header}s.
+     * Generated header names will either be the {@code baseName} plus its index, or exactly the provided {@code baseName} so that
+     * we also test support for multiple headers with the same key and different values.
+     */
+    static Header[] randomHeaders(Random random, final String baseName) {
+        int numHeaders = RandomNumbers.randomIntBetween(random, 0, 5);
+        final Header[] headers = new Header[numHeaders];
+        for (int i = 0; i < numHeaders; i++) {
+            String headerName = baseName;
+            //randomly exercise the code path that supports multiple headers with same key
+            if (random.nextBoolean()) {
+                headerName = headerName + i;
+            }
+            headers[i] = new BasicHeader(headerName, RandomStrings.randomAsciiOfLengthBetween(random, 3, 10));
+        }
+        return headers;
+    }
 }
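Note that randomHeaders deliberately reuses the bare baseName for some entries, so a single call can return several headers with the same name and different values — which is why assertHeaders above keeps a List of values per name. A small calling sketch (same package assumed, since the class is package-private; not part of the commit):

import java.util.Random;
import org.apache.http.Header;

// In package org.elasticsearch.client, alongside RestClientTestUtil.
class RandomHeadersDemo {
    public static void main(String[] args) {
        Header[] headers = RestClientTestUtil.randomHeaders(new Random(), "Header");
        for (Header header : headers) {
            // Names are either "Header" (duplicate-prone) or "Header" + index.
            System.out.println(header.getName() + ": " + header.getValue());
        }
    }
}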
@@ -349,7 +349,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
                 } else if ("fields".equals(currentFieldName)) {
                     throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
                 } else if ("_source".equals(currentFieldName)) {
-                    fetchSourceContext = FetchSourceContext.parse(parser);
+                    fetchSourceContext = FetchSourceContext.fromXContent(parser);
                 } else {
                     throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
                 }

@@ -362,7 +362,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest
                     throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
                 }
             } else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.parse(parser);
+                fetchSourceContext = FetchSourceContext.fromXContent(parser);
             } else if (token != XContentParser.Token.VALUE_NULL) {
                 throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
             }
@@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.DeprecationLogger;

@@ -714,7 +713,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
             } else if ("script".equals(currentFieldName)) {
-                script = Script.parse(parser, ParseFieldMatcher.EMPTY);
+                script = Script.parse(parser);
             } else if ("scripted_upsert".equals(currentFieldName)) {
                 scriptedUpsert = parser.booleanValue();
             } else if ("upsert".equals(currentFieldName)) {

@@ -740,7 +739,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
                     fields(fields.toArray(new String[fields.size()]));
                 }
             } else if ("_source".equals(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.parse(parser);
+                fetchSourceContext = FetchSourceContext.fromXContent(parser);
             }
         }
         if (script != null) {
@@ -22,8 +22,6 @@ package org.elasticsearch.cluster.metadata;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -70,7 +68,7 @@ public final class IndexGraveyard implements MetaData.Custom {
 
     public static final String TYPE = "index-graveyard";
     private static final ParseField TOMBSTONES_FIELD = new ParseField("tombstones");
-    private static final ObjectParser<List<Tombstone>, ParseFieldMatcherSupplier> GRAVEYARD_PARSER;
+    private static final ObjectParser<List<Tombstone>, Void> GRAVEYARD_PARSER;
     static {
         GRAVEYARD_PARSER = new ObjectParser<>("index_graveyard", ArrayList::new);
         GRAVEYARD_PARSER.declareObjectArray(List::addAll, Tombstone.getParser(), TOMBSTONES_FIELD);

@@ -141,7 +139,7 @@ public final class IndexGraveyard implements MetaData.Custom {
     }
 
     public static IndexGraveyard fromXContent(final XContentParser parser) throws IOException {
-        return new IndexGraveyard(GRAVEYARD_PARSER.parse(parser, () -> ParseFieldMatcher.STRICT));
+        return new IndexGraveyard(GRAVEYARD_PARSER.parse(parser, null));
     }
 
     @Override

@@ -354,16 +352,17 @@ public final class IndexGraveyard implements MetaData.Custom {
         private static final String INDEX_KEY = "index";
         private static final String DELETE_DATE_IN_MILLIS_KEY = "delete_date_in_millis";
         private static final String DELETE_DATE_KEY = "delete_date";
-        private static final ObjectParser<Tombstone.Builder, ParseFieldMatcherSupplier> TOMBSTONE_PARSER;
+        private static final ObjectParser<Tombstone.Builder, Void> TOMBSTONE_PARSER;
         static {
             TOMBSTONE_PARSER = new ObjectParser<>("tombstoneEntry", Tombstone.Builder::new);
-            TOMBSTONE_PARSER.declareObject(Tombstone.Builder::index, Index::parseIndex, new ParseField(INDEX_KEY));
+            TOMBSTONE_PARSER.declareObject(Tombstone.Builder::index, (parser, context) -> Index.fromXContent(parser),
+                new ParseField(INDEX_KEY));
             TOMBSTONE_PARSER.declareLong(Tombstone.Builder::deleteDateInMillis, new ParseField(DELETE_DATE_IN_MILLIS_KEY));
             TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
         }
 
-        static ContextParser<ParseFieldMatcherSupplier, Tombstone> getParser() {
-            return (p, c) -> TOMBSTONE_PARSER.apply(p, c).build();
+        static ContextParser<Void, Tombstone> getParser() {
+            return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build();
         }
 
         private final Index index;

@@ -438,7 +437,7 @@ public final class IndexGraveyard implements MetaData.Custom {
         }
 
         public static Tombstone fromXContent(final XContentParser parser) throws IOException {
-            return TOMBSTONE_PARSER.parse(parser, () -> ParseFieldMatcher.STRICT).build();
+            return TOMBSTONE_PARSER.parse(parser, null).build();
         }
 
         /**
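All of the IndexGraveyard edits above are instances of one migration that the xcontent hunks below make possible: Context is now a free type parameter, so parsers that need no context declare Void and pass null instead of supplying a ParseFieldMatcher. A minimal sketch of the migrated style (hypothetical class and field names, not code from this commit):

import java.io.IOException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

class TombstoneSketch {
    String index;
    long deleteDateInMillis;

    // Void context: the parser needs nothing from its caller any more.
    static final ObjectParser<TombstoneSketch, Void> PARSER =
            new ObjectParser<>("tombstone_sketch", TombstoneSketch::new);
    static {
        PARSER.declareString((t, s) -> t.index = s, new ParseField("index"));
        PARSER.declareLong((t, l) -> t.deleteDateInMillis = l,
                new ParseField("delete_date_in_millis"));
    }

    static TombstoneSketch fromXContent(XContentParser parser) throws IOException {
        // Previously the call site had to supply a matcher, e.g.
        // PARSER.parse(parser, () -> ParseFieldMatcher.STRICT); now null is enough.
        return PARSER.parse(parser, null);
    }
}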
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.xcontent;
 
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;

@@ -34,7 +33,7 @@ import java.util.function.BiFunction;
 /**
  * Superclass for {@link ObjectParser} and {@link ConstructingObjectParser}. Defines most of the "declare" methods so they can be shared.
  */
-public abstract class AbstractObjectParser<Value, Context extends ParseFieldMatcherSupplier>
+public abstract class AbstractObjectParser<Value, Context>
         implements BiFunction<XContentParser, Context, Value>, ContextParser<Context, Value> {
 
     /**
@@ -20,7 +20,6 @@
 package org.elasticsearch.common.xcontent;
 
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 

@@ -74,7 +73,7 @@ import java.util.function.Function;
  * Note: if optional constructor arguments aren't specified then the number of allocations is always the worst case.
  * </p>
  */
-public final class ConstructingObjectParser<Value, Context extends ParseFieldMatcherSupplier> extends AbstractObjectParser<Value, Context> {
+public final class ConstructingObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
     /**
      * Consumer that marks a field as a required constructor argument instead of a real object field.
      */

@@ -236,7 +235,7 @@ public final class ConstructingObjectParser<Value, Context extends ParseFieldMatcherSupplier>
 
     /**
      * The target of the {@linkplain ConstructingObjectParser}. One of these is built every time you call
-     * {@linkplain ConstructingObjectParser#apply(XContentParser, ParseFieldMatcherSupplier)} Note that it is not static so it inherits
+     * {@linkplain ConstructingObjectParser#apply(XContentParser, Object)} Note that it is not static so it inherits
      * {@linkplain ConstructingObjectParser}'s type parameters.
      */
     private class Target {
@@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.ParsingException;
 
 import java.io.IOException;

@@ -68,7 +67,7 @@ import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRING;
  * It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of
  * {@link #declareField} which can be used to implement exceptional parsing operations not covered by the high level methods.
  */
-public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier> extends AbstractObjectParser<Value, Context> {
+public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
     /**
      * Adapts an array (or varags) setter into a list setter.
     */

@@ -153,7 +152,7 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier>
             }
         }
 
-        FieldParser<Value> fieldParser = null;
+        FieldParser fieldParser = null;
         String currentFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {

@@ -167,7 +166,7 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier>
                 assert ignoreUnknownFields : "this should only be possible if configured to ignore known fields";
                 parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array
             } else {
-                fieldParser.assertSupports(name, token, currentFieldName, context.getParseFieldMatcher());
+                fieldParser.assertSupports(name, token, currentFieldName);
                 parseSub(parser, fieldParser, currentFieldName, value, context);
             }
             fieldParser = null;

@@ -357,13 +356,13 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier>
         return name;
     }
 
-    private void parseArray(XContentParser parser, FieldParser<Value> fieldParser, String currentFieldName, Value value, Context context)
+    private void parseArray(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
             throws IOException {
         assert parser.currentToken() == XContentParser.Token.START_ARRAY : "Token was: " + parser.currentToken();
         parseValue(parser, fieldParser, currentFieldName, value, context);
     }
 
-    private void parseValue(XContentParser parser, FieldParser<Value> fieldParser, String currentFieldName, Value value, Context context)
+    private void parseValue(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
             throws IOException {
         try {
             fieldParser.parser.parse(parser, value, context);

@@ -372,7 +371,7 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier>
         }
     }
 
-    private void parseSub(XContentParser parser, FieldParser<Value> fieldParser, String currentFieldName, Value value, Context context)
+    private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
            throws IOException {
         final XContentParser.Token token = parser.currentToken();
         switch (token) {

@@ -396,27 +395,27 @@ public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier>
     }
 
     private FieldParser getParser(String fieldName) {
-        FieldParser<Value> parser = fieldParserMap.get(fieldName);
+        FieldParser parser = fieldParserMap.get(fieldName);
         if (parser == null && false == ignoreUnknownFields) {
             throw new IllegalArgumentException("[" + name + "] unknown field [" + fieldName + "], parser not found");
         }
         return parser;
     }
 
-    public static class FieldParser<T> {
-        private final Parser parser;
+    private class FieldParser {
+        private final Parser<Value, Context> parser;
         private final EnumSet<XContentParser.Token> supportedTokens;
         private final ParseField parseField;
         private final ValueType type;
 
-        public FieldParser(Parser parser, EnumSet<XContentParser.Token> supportedTokens, ParseField parseField, ValueType type) {
+        FieldParser(Parser<Value, Context> parser, EnumSet<XContentParser.Token> supportedTokens, ParseField parseField, ValueType type) {
            this.parser = parser;
             this.supportedTokens = supportedTokens;
             this.parseField = parseField;
             this.type = type;
         }
 
-        public void assertSupports(String parserName, XContentParser.Token token, String currentFieldName, ParseFieldMatcher matcher) {
|
void assertSupports(String parserName, XContentParser.Token token, String currentFieldName) {
|
||||||
if (parseField.match(currentFieldName) == false) {
|
if (parseField.match(currentFieldName) == false) {
|
||||||
throw new IllegalStateException("[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
|
throw new IllegalStateException("[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
|
||||||
}
|
}
|
||||||
|
|
|
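
[Review note] The hunks above drop the Context extends ParseFieldMatcherSupplier bound, so ObjectParser can now be instantiated with any context type, including Void when no context is needed. A minimal sketch against the new signature; the SimpleStruct class and its setter are hypothetical, only the parser wiring mirrors this change:

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class SimpleStruct {
        String name;
        void setName(String name) { this.name = name; }

        // Context is a plain type parameter now; Void marks a context-free parser.
        static final ObjectParser<SimpleStruct, Void> PARSER =
                new ObjectParser<>("simple_struct", SimpleStruct::new);
        static {
            PARSER.declareString(SimpleStruct::setName, new ParseField("name"));
        }

        static SimpleStruct fromXContent(XContentParser parser) {
            // Callers that used to pass () -> ParseFieldMatcher.STRICT now pass null.
            return PARSER.apply(parser, null);
        }
    }
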
@@ -21,8 +21,6 @@ package org.elasticsearch.index;
 
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -42,7 +40,7 @@ public class Index implements Writeable, ToXContent {
     public static final Index[] EMPTY_ARRAY = new Index[0];
     private static final String INDEX_UUID_KEY = "index_uuid";
     private static final String INDEX_NAME_KEY = "index_name";
-    private static final ObjectParser<Builder, ParseFieldMatcherSupplier> INDEX_PARSER = new ObjectParser<>("index", Builder::new);
+    private static final ObjectParser<Builder, Void> INDEX_PARSER = new ObjectParser<>("index", Builder::new);
     static {
         INDEX_PARSER.declareString(Builder::name, new ParseField(INDEX_NAME_KEY));
         INDEX_PARSER.declareString(Builder::uuid, new ParseField(INDEX_UUID_KEY));
@@ -118,11 +116,7 @@ public class Index implements Writeable, ToXContent {
     }
 
     public static Index fromXContent(final XContentParser parser) throws IOException {
-        return INDEX_PARSER.parse(parser, () -> ParseFieldMatcher.STRICT).build();
-    }
-
-    public static final Index parseIndex(final XContentParser parser, final ParseFieldMatcherSupplier supplier) {
-        return INDEX_PARSER.apply(parser, supplier).build();
+        return INDEX_PARSER.parse(parser, null).build();
     }
 
     /**
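
[Review note] With the parseIndex(parser, supplier) overload gone, Index round-trips through fromXContent only, and field-name matching is strict by default. Caller-side sketch (assumes a parser already positioned on the index object):

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.Index;

    static Index readIndex(XContentParser parser) throws IOException {
        // e.g. {"index_name": "twitter", "index_uuid": "_na_"}
        return Index.fromXContent(parser);
    }
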
@@ -96,7 +96,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
                 ObjectParser.ValueType.OBJECT_ARRAY);
         PARSER.declareField((p, i, c) -> {
             try {
-                i.setFetchSourceContext(FetchSourceContext.parse(c.parser()));
+                i.setFetchSourceContext(FetchSourceContext.fromXContent(c.parser()));
             } catch (IOException e) {
                 throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e);
             }
@@ -100,7 +100,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
                     // skip
                 } else if (token == XContentParser.Token.START_OBJECT) {
                     if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                        script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
+                        script = Script.parse(parser, parseContext.getDefaultScriptLanguage());
                     } else {
                         throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
                     }
@@ -110,7 +110,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
                 } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) {
                     boost = parser.floatValue();
                 } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
+                    script = Script.parse(parser, parseContext.getDefaultScriptLanguage());
                 } else {
                     throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
                 }
@@ -112,7 +112,7 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
                 currentFieldName = parser.currentName();
             } else {
                 if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, parseContext.getParseFieldMatcher(), parseContext.getDefaultScriptLanguage());
+                    script = Script.parse(parser, parseContext.getDefaultScriptLanguage());
                 } else {
                     throw new ParsingException(parser.getTokenLocation(), NAME + " query does not support [" + currentFieldName + "]");
                 }
@@ -23,7 +23,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Version;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -475,7 +474,6 @@ public class BlobStoreIndexShardSnapshot implements ToXContent {
     private static final ParseField PARSE_NUMBER_OF_FILES = new ParseField("number_of_files");
     private static final ParseField PARSE_TOTAL_SIZE = new ParseField("total_size");
     private static final ParseField PARSE_FILES = new ParseField("files");
-    private static final ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
 
     /**
      * Serializes shard snapshot metadata info into JSON
@@ -559,5 +557,4 @@ public class BlobStoreIndexShardSnapshot implements ToXContent {
         return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, Collections.unmodifiableList(indexFiles),
                 startTime, time, numberOfFiles, totalSize);
     }
-
 }
@@ -21,7 +21,6 @@ package org.elasticsearch.index.snapshots.blobstore;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -153,7 +152,6 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
         static final ParseField FILES = new ParseField("files");
         static final ParseField SNAPSHOTS = new ParseField("snapshots");
     }
-    private static final ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
 
     /**
      * Writes index file for the shard in the following format.
@@ -107,25 +107,17 @@ public class RecoveriesCollection {
             }
 
             // Closes the current recovery target
-            final AtomicBoolean successfulReset = new AtomicBoolean();
-            try {
-                final RecoveryTarget finalOldRecoveryTarget = oldRecoveryTarget;
-                newRecoveryTarget.CancellableThreads().executeIO(() -> successfulReset.set(finalOldRecoveryTarget.resetRecovery()));
-            } catch (CancellableThreads.ExecutionCancelledException e) {
-                // new recovery target is already cancelled (probably due to shard closing or recovery source changing)
-                assert onGoingRecoveries.containsKey(newRecoveryTarget.recoveryId()) == false;
-                logger.trace("{} recovery reset cancelled, recovery from {}, id [{}], previous id [{}]", newRecoveryTarget.shardId(),
-                    newRecoveryTarget.sourceNode(), newRecoveryTarget.recoveryId(), oldRecoveryTarget.recoveryId());
-                oldRecoveryTarget.cancel("recovery reset cancelled"); // if finalOldRecoveryTarget.resetRecovery did not even get to execute
-                return null;
-            }
-            if (successfulReset.get() == false) {
-                cancelRecovery(newRecoveryTarget.recoveryId(), "failed to reset recovery");
-                return null;
-            } else {
+            boolean successfulReset = oldRecoveryTarget.resetRecovery(newRecoveryTarget.CancellableThreads());
+            if (successfulReset) {
                 logger.trace("{} restarted recovery from {}, id [{}], previous id [{}]", newRecoveryTarget.shardId(),
                     newRecoveryTarget.sourceNode(), newRecoveryTarget.recoveryId(), oldRecoveryTarget.recoveryId());
                 return newRecoveryTarget;
+            } else {
+                logger.trace("{} recovery could not be reset as it is already cancelled, recovery from {}, id [{}], previous id [{}]",
+                    newRecoveryTarget.shardId(), newRecoveryTarget.sourceNode(), newRecoveryTarget.recoveryId(),
+                    oldRecoveryTarget.recoveryId());
+                cancelRecovery(newRecoveryTarget.recoveryId(), "recovery cancelled during reset");
+                return null;
             }
         } catch (Exception e) {
             // fail shard to be safe
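
[Review note] The caller no longer needs the AtomicBoolean-inside-executeIO dance: resetRecovery now returns its result directly and does its own cancellable waiting (see the RecoveryTarget hunks below), so the ExecutionCancelledException handling disappears from this class. The shape of that simplification, reduced to plain Java (illustrative only, not ES API):

    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.function.BooleanSupplier;

    // Before: the result had to be smuggled out of a lambda run elsewhere.
    static boolean resetViaCallback(BooleanSupplier reset) {
        AtomicBoolean successful = new AtomicBoolean();
        Runnable wrapped = () -> successful.set(reset.getAsBoolean());
        wrapped.run(); // stand-in for CancellableThreads.executeIO(...)
        return successful.get();
    }

    // After: the operation simply returns its result.
    static boolean resetDirect(BooleanSupplier reset) {
        return reset.getAsBoolean();
    }
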
@@ -36,6 +36,7 @@ import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.Callback;
 import org.elasticsearch.common.util.CancellableThreads;
 import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
@@ -56,6 +57,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -182,17 +185,21 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
     * Closes the current recovery target and waits up to a certain timeout for resources to be freed.
     * Returns true if resetting the recovery was successful, false if the recovery target is already cancelled / failed or marked as done.
     */
-    boolean resetRecovery() throws InterruptedException, IOException {
+    boolean resetRecovery(CancellableThreads newTargetCancellableThreads) throws IOException {
        if (finished.compareAndSet(false, true)) {
            try {
-                // yes, this is just a logger call in a try-finally block. The reason for this is that resetRecovery is called from
-                // CancellableThreads and we have to make sure that all references to IndexShard are cleaned up before exiting this method
                logger.debug("reset of recovery with shard {} and id [{}]", shardId, recoveryId);
            } finally {
                // release the initial reference. recovery files will be cleaned as soon as ref count goes to zero, potentially now.
                decRef();
            }
-            closedLatch.await();
+            try {
+                newTargetCancellableThreads.execute(closedLatch::await);
+            } catch (CancellableThreads.ExecutionCancelledException e) {
+                logger.trace("new recovery target cancelled for shard {} while waiting on old recovery target with id [{}] to close",
+                    shardId, recoveryId);
+                return false;
+            }
            RecoveryState.Stage stage = indexShard.recoveryState().getStage();
            if (indexShard.recoveryState().getPrimary() && (stage == RecoveryState.Stage.FINALIZE || stage == RecoveryState.Stage.DONE)) {
                // once primary relocation has moved past the finalization step, the relocation source can be moved to RELOCATED state
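
[Review note] resetRecovery now waits on closedLatch under the new target's CancellableThreads, so cancelling the new target aborts the wait instead of blocking it indefinitely. A sketch of that waiting pattern in isolation (method and parameter names are illustrative; the CancellableThreads contract is the one used in the hunk above):

    import java.util.concurrent.CountDownLatch;
    import org.elasticsearch.common.util.CancellableThreads;

    static boolean awaitOldTargetClosed(CancellableThreads newTargetThreads, CountDownLatch closedLatch) {
        try {
            // execute(...) runs the interruptible task and throws
            // ExecutionCancelledException if cancel() is called meanwhile.
            newTargetThreads.execute(closedLatch::await);
        } catch (CancellableThreads.ExecutionCancelledException e) {
            return false; // the new recovery target was cancelled while waiting
        }
        return true;
    }
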
@@ -24,8 +24,6 @@ import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
@@ -47,7 +45,7 @@ public final class IngestMetadata implements MetaData.Custom {
 
     public static final String TYPE = "ingest";
     private static final ParseField PIPELINES_FIELD = new ParseField("pipeline");
-    private static final ObjectParser<List<PipelineConfiguration>, ParseFieldMatcherSupplier> INGEST_METADATA_PARSER = new ObjectParser<>(
+    private static final ObjectParser<List<PipelineConfiguration>, Void> INGEST_METADATA_PARSER = new ObjectParser<>(
             "ingest_metadata", ArrayList::new);
 
     static {
@@ -95,7 +93,7 @@ public final class IngestMetadata implements MetaData.Custom {
 
     public static IngestMetadata fromXContent(XContentParser parser) throws IOException {
         Map<String, PipelineConfiguration> pipelines = new HashMap<>();
-        List<PipelineConfiguration> configs = INGEST_METADATA_PARSER.parse(parser, () -> ParseFieldMatcher.STRICT);
+        List<PipelineConfiguration> configs = INGEST_METADATA_PARSER.parse(parser, null);
         for (PipelineConfiguration pipeline : configs) {
             pipelines.put(pipeline.getId(), pipeline);
         }
@@ -50,8 +50,8 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
         }, new ParseField("config"), ObjectParser.ValueType.OBJECT);
     }
 
-    public static ContextParser<ParseFieldMatcherSupplier, PipelineConfiguration> getParser() {
-        return (p, c) -> PARSER.apply(p ,c).build();
+    public static ContextParser<Void, PipelineConfiguration> getParser() {
+        return (parser, context) -> PARSER.apply(parser, null).build();
     }
     private static class Builder {
 
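
[Review note] getParser now advertises a Void context and ignores the context argument entirely, always handing null through to PARSER. Caller-side sketch (the surrounding method is illustrative):

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.ContextParser;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.ingest.PipelineConfiguration;

    static PipelineConfiguration readPipeline(XContentParser xContentParser) throws IOException {
        ContextParser<Void, PipelineConfiguration> parser = PipelineConfiguration.getParser();
        return parser.parse(xContentParser, null); // context is unused
    }
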
@@ -21,8 +21,6 @@ package org.elasticsearch.script;
 
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -35,7 +33,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.query.QueryParseContext;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -211,7 +208,7 @@ public final class Script implements ToXContent, Writeable {
         }
     }
 
-    private static final ObjectParser<Builder, ParseFieldMatcherSupplier> PARSER = new ObjectParser<>("script", Builder::new);
+    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("script", Builder::new);
 
     static {
         // Defines the fields necessary to parse a Script as XContent using an ObjectParser.
@@ -224,19 +221,11 @@ public final class Script implements ToXContent, Writeable {
     }
 
     /**
-     * Convenience method to call {@link Script#parse(XContentParser, ParseFieldMatcher, String)}
+     * Convenience method to call {@link Script#parse(XContentParser, String)}
      * using the default scripting language.
      */
-    public static Script parse(XContentParser parser, ParseFieldMatcher matcher) throws IOException {
-        return parse(parser, matcher, DEFAULT_SCRIPT_LANG);
-    }
-
-    /**
-     * Convenience method to call {@link Script#parse(XContentParser, ParseFieldMatcher, String)} using the
-     * {@link ParseFieldMatcher} and scripting language provided by the {@link QueryParseContext}.
-     */
-    public static Script parse(XContentParser parser, QueryParseContext context) throws IOException {
-        return parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+    public static Script parse(XContentParser parser) throws IOException {
+        return parse(parser, DEFAULT_SCRIPT_LANG);
     }
 
     /**
@@ -300,13 +289,12 @@ public final class Script implements ToXContent, Writeable {
     * }
     *
     * @param parser       The {@link XContentParser} to be used.
-    * @param matcher      The {@link ParseFieldMatcher} to be used.
     * @param defaultLang  The default language to use if no language is specified. The default language isn't necessarily
     *                     the one defined by {@link Script#DEFAULT_SCRIPT_LANG} due to backwards compatiblity requirements
    *                      related to stored queries using previously default languauges.
     * @return The parsed {@link Script}.
     */
-    public static Script parse(XContentParser parser, ParseFieldMatcher matcher, String defaultLang) throws IOException {
+    public static Script parse(XContentParser parser, String defaultLang) throws IOException {
        Objects.requireNonNull(defaultLang);
 
        Token token = parser.currentToken();
@@ -319,7 +307,7 @@ public final class Script implements ToXContent, Writeable {
            return new Script(ScriptType.INLINE, defaultLang, parser.text(), Collections.emptyMap());
        }
 
-        return PARSER.apply(parser, () -> matcher).build(defaultLang);
+        return PARSER.apply(parser, null).build(defaultLang);
    }
 
    private final ScriptType type;
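
[Review note] Script keeps two public parse entry points after this change, both matcher-free; the QueryParseContext overload is gone, so query parsers call the String overload with the context's default language, as the ScriptQueryBuilder hunks above already do. Caller-side sketch:

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.script.Script;

    static Script readScript(XContentParser parser) throws IOException {
        return Script.parse(parser); // defaults to Script.DEFAULT_SCRIPT_LANG
    }

    static Script readScript(XContentParser parser, String defaultLang) throws IOException {
        return Script.parse(parser, defaultLang); // e.g. context.getDefaultScriptLanguage()
    }
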
@@ -57,7 +57,7 @@ public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregati
     }
 
     private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
-        return Range.fromXContent(parser, context.getParseFieldMatcher());
+        return Range.fromXContent(parser);
     }
 
     public RangeAggregationBuilder(String name) {
@@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.range;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -122,7 +121,7 @@ public class RangeAggregator extends BucketsAggregator {
             return new Range(key, from, fromAsStr, to, toAsStr);
         }
 
-        public static Range fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
+        public static Range fromXContent(XContentParser parser) throws IOException {
             XContentParser.Token token;
             String currentFieldName = null;
             double from = Double.NEGATIVE_INFINITY;
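
[Review note] Range.fromXContent drops the matcher, while the parseRange helpers keep their QueryParseContext parameter, presumably so all range flavours share one functional shape. Sketch of the new call (the wrapper method mirrors the builders above):

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.query.QueryParseContext;
    import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;

    static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
        return Range.fromXContent(parser); // context is no longer consulted here
    }
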
@@ -60,7 +60,7 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
     }
 
     private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
-        return Range.fromXContent(parser, context.getParseFieldMatcher());
+        return Range.fromXContent(parser);
     }
 
     public DateRangeAggregationBuilder(String name) {
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.bucket.range.geodistance;
 
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -32,8 +31,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
@@ -168,7 +167,6 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
     }
 
     private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
-        ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
         String fromAsStr = null;
         String toAsStr = null;
         double from = 0.0;
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.range.ip;
 import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -33,8 +32,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator;
@@ -81,7 +80,6 @@ public final class IpRangeAggregationBuilder
     }
 
     private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
-        final ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
         String key = null;
         String from = null;
         String to = null;
@@ -158,7 +158,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
                     currentFieldName = parser.currentName();
                 } else {
                     if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                        script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                        script = Script.parse(parser, context.getDefaultScriptLanguage());
                     } else {
                         throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
                     }
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.terms.support;
 import com.carrotsearch.hppc.BitMixer;
 import com.carrotsearch.hppc.LongHashSet;
 import com.carrotsearch.hppc.LongSet;
-
 import org.apache.lucene.index.RandomAccessOrds;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
@@ -39,7 +38,6 @@ import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -103,7 +101,6 @@ public class IncludeExclude implements Writeable, ToXContent {
         } else if (token == XContentParser.Token.START_ARRAY) {
             return new IncludeExclude(new TreeSet<>(parseArrayToSet(parser)), null);
         } else if (token == XContentParser.Token.START_OBJECT) {
-            ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
             String currentFieldName = null;
             Integer partition = null, numPartitions = null;
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -38,7 +38,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Objects;
@@ -255,13 +254,13 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
                 currentFieldName = parser.currentName();
             } else if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.VALUE_STRING) {
                 if (INIT_SCRIPT_FIELD.match(currentFieldName)) {
-                    initScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    initScript = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (MAP_SCRIPT_FIELD.match(currentFieldName)) {
-                    mapScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    mapScript = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (COMBINE_SCRIPT_FIELD.match(currentFieldName)) {
-                    combineScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    combineScript = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (REDUCE_SCRIPT_FIELD.match(currentFieldName)) {
-                    reduceScript = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    reduceScript = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (token == XContentParser.Token.START_OBJECT &&
                         PARAMS_FIELD.match(currentFieldName)) {
                     params = parser.map();
@@ -616,7 +616,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                 } else if (SearchSourceBuilder.TRACK_SCORES_FIELD.match(currentFieldName)) {
                     factory.trackScores(parser.booleanValue());
                 } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
-                    factory.fetchSource(FetchSourceContext.parse(context.parser()));
+                    factory.fetchSource(FetchSourceContext.fromXContent(context.parser()));
                 } else if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName)) {
                     factory.storedFieldsContext =
                         StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
@@ -628,7 +628,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
-                    factory.fetchSource(FetchSourceContext.parse(context.parser()));
+                    factory.fetchSource(FetchSourceContext.fromXContent(context.parser()));
                 } else if (SearchSourceBuilder.SCRIPT_FIELDS_FIELD.match(currentFieldName)) {
                     List<ScriptField> scriptFields = new ArrayList<>();
                     while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -642,7 +642,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                             currentFieldName = parser.currentName();
                         } else if (token.isValue()) {
                             if (SearchSourceBuilder.SCRIPT_FIELD.match(currentFieldName)) {
-                                script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                                script = Script.parse(parser, context.getDefaultScriptLanguage());
                             } else if (SearchSourceBuilder.IGNORE_FAILURE_FIELD.match(currentFieldName)) {
                                 ignoreFailure = parser.booleanValue();
                             } else {
@@ -652,7 +652,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                             }
                         } else if (token == XContentParser.Token.START_OBJECT) {
                             if (SearchSourceBuilder.SCRIPT_FIELD.match(currentFieldName)) {
-                                script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                                script = Script.parse(parser, context.getDefaultScriptLanguage());
                             } else {
                                 throw new ParsingException(parser.getTokenLocation(),
                                     "Unknown key for a " + token + " in [" + currentFieldName + "].",
@@ -699,7 +699,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                 List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context);
                 factory.sorts(sorts);
             } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
-                factory.fetchSource(FetchSourceContext.parse(context.parser()));
+                factory.fetchSource(FetchSourceContext.fromXContent(context.parser()));
             } else {
                 throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
                     parser.getTokenLocation());
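
[Review note] All three _source branches switch from FetchSourceContext.parse to fromXContent, matching the fromXContent naming already used for StoredFieldsContext in the same method. Sketch (assumes FetchSourceContext's 5.x location under search.fetch.subphase):

    import java.io.IOException;
    import org.elasticsearch.index.query.QueryParseContext;
    import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

    static FetchSourceContext readFetchSource(QueryParseContext context) throws IOException {
        return FetchSourceContext.fromXContent(context.parser());
    }
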
@@ -178,7 +178,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
                 } else if (GAP_POLICY.match(currentFieldName)) {
                     gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
                 } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else {
                     throw new ParsingException(parser.getTokenLocation(),
                         "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "].");
@@ -200,7 +200,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (BUCKETS_PATH.match(currentFieldName)) {
                     Map<String, Object> map = parser.map();
                     bucketsPathsMap = new HashMap<>();
@@ -141,7 +141,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
                 } else if (GAP_POLICY.match(currentFieldName)) {
                     gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
                 } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else {
                     throw new ParsingException(parser.getTokenLocation(),
                         "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "].");
@@ -163,7 +163,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (BUCKETS_PATH.match(currentFieldName)) {
                     Map<String, Object> map = parser.map();
                     bucketsPathsMap = new HashMap<>();
@@ -406,7 +406,7 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio
             MovAvgModel.AbstractModelParser modelParser = movingAverageMdelParserRegistry.lookup(model, parser.getTokenLocation());
             MovAvgModel movAvgModel;
             try {
-                movAvgModel = modelParser.parse(settings, pipelineAggregatorName, factory.window(), context.getParseFieldMatcher());
+                movAvgModel = modelParser.parse(settings, pipelineAggregatorName, factory.window());
             } catch (ParseException exception) {
                 throw new ParsingException(parser.getTokenLocation(), "Could not parse settings for model [" + model + "].", exception);
             }
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -127,8 +126,7 @@ public class EwmaModel extends MovAvgModel {
 
     public static final AbstractModelParser PARSER = new AbstractModelParser() {
         @Override
-        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
-                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
+        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
             double alpha = parseDoubleParam(settings, "alpha", DEFAULT_ALPHA);
             checkUnrecognizedParams(settings);
             return new EwmaModel(alpha);
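
[Review note] Every MovAvgModel parser loses the trailing ParseFieldMatcher parameter; the same three-argument override repeats in the Holt* and Linear models below. A custom model would now be parsed like this (MyModel is hypothetical):

    import java.text.ParseException;
    import java.util.Map;
    import org.elasticsearch.common.Nullable;
    import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
    import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel.AbstractModelParser;

    public static final AbstractModelParser PARSER = new AbstractModelParser() {
        @Override
        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName,
                                 int windowSize) throws ParseException {
            checkUnrecognizedParams(settings); // inherited helper
            return new MyModel();              // hypothetical model
        }
    };
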
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -191,8 +190,7 @@ public class HoltLinearModel extends MovAvgModel {
 
     public static final AbstractModelParser PARSER = new AbstractModelParser() {
         @Override
-        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
-                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
+        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
 
             double alpha = parseDoubleParam(settings, "alpha", DEFAULT_ALPHA);
             double beta = parseDoubleParam(settings, "beta", DEFAULT_BETA);
@@ -23,7 +23,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -59,11 +58,10 @@ public class HoltWintersModel extends MovAvgModel {
          * Parse a string SeasonalityType into the byte enum
          *
          * @param text SeasonalityType in string format (e.g. "add")
-         * @param parseFieldMatcher Matcher for field names
         * @return SeasonalityType enum
          */
         @Nullable
-        public static SeasonalityType parse(String text, ParseFieldMatcher parseFieldMatcher) {
+        public static SeasonalityType parse(String text) {
             if (text == null) {
                 return null;
             }
@@ -379,8 +377,7 @@ public class HoltWintersModel extends MovAvgModel {
 
     public static final AbstractModelParser PARSER = new AbstractModelParser() {
         @Override
-        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
-                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
+        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
 
             double alpha = parseDoubleParam(settings, "alpha", DEFAULT_ALPHA);
             double beta = parseDoubleParam(settings, "beta", DEFAULT_BETA);
@@ -399,7 +396,7 @@ public class HoltWintersModel extends MovAvgModel {
             Object value = settings.get("type");
             if (value != null) {
                 if (value instanceof String) {
-                    seasonalityType = SeasonalityType.parse((String)value, parseFieldMatcher);
+                    seasonalityType = SeasonalityType.parse((String)value);
                     settings.remove("type");
                 } else {
                     throw new ParseException("Parameter [type] must be a String, type `"
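With ParseFieldMatcher gone from the model parsers, callers hand the settings map straight to PARSER.parse. A minimal sketch of the new call shape, assuming the Elasticsearch sources above are on the classpath; the pipeline name, window size, and setting values are illustrative, not taken from this change:

    import java.text.ParseException;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel;
    import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;

    public class HoltWintersParseSketch {
        public static void main(String[] args) throws ParseException {
            // Settings as they would arrive from the body of a moving_avg aggregation.
            Map<String, Object> settings = new HashMap<>();
            settings.put("alpha", 0.5);
            settings.put("beta", 0.3);
            settings.put("type", "add"); // seasonality type, now parsed without a matcher

            // No ParseFieldMatcher argument anymore: settings, pipeline name, window size.
            MovAvgModel model = HoltWintersModel.PARSER.parse(settings, "my_movavg", 30);
            System.out.println(model);
        }
    }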
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 
 
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -106,8 +105,7 @@ public class LinearModel extends MovAvgModel {
 
     public static final AbstractModelParser PARSER = new AbstractModelParser() {
         @Override
-        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
-                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
+        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
             checkUnrecognizedParams(settings);
             return new LinearModel();
         }
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -143,11 +142,10 @@ public abstract class MovAvgModel implements NamedWriteable, ToXContent {
      * @param settings Map of settings, extracted from the request
      * @param pipelineName Name of the parent pipeline agg
      * @param windowSize Size of the window for this moving avg
-     * @param parseFieldMatcher Matcher for field names
      * @return A fully built moving average model
      */
     public abstract MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName,
-                                      int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException;
+                                      int windowSize) throws ParseException;
 
 
     /**
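Every concrete model parser drops the trailing matcher parameter of this abstract method. As a sketch, a custom AbstractModelParser written against the new three-argument signature would mirror the shipped SimpleModel parser in the next hunk; this fragment is illustrative only:

    // A minimal override of the new contract; mirrors SimpleModel below.
    public static final AbstractModelParser PARSER = new AbstractModelParser() {
        @Override
        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName,
                                 int windowSize) throws ParseException {
            // Field names are validated inside the parser now;
            // no ParseFieldMatcher is threaded through.
            checkUnrecognizedParams(settings);
            return new SimpleModel();
        }
    };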
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.pipeline.movavg.models;
 
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -99,8 +98,7 @@ public class SimpleModel extends MovAvgModel {
 
     public static final AbstractModelParser PARSER = new AbstractModelParser() {
         @Override
-        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize,
-                                 ParseFieldMatcher parseFieldMatcher) throws ParseException {
+        public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
             checkUnrecognizedParams(settings);
             return new SimpleModel();
         }
@@ -35,30 +35,30 @@ public final class ValuesSourceParserHelper {
     public static void declareAnyFields(
             ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource, ?>, QueryParseContext> objectParser,
             boolean scriptable, boolean formattable) {
-        declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.ANY, null);
+        declareFields(objectParser, scriptable, formattable, false, null);
     }
 
     public static void declareNumericFields(
             ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, ?>, QueryParseContext> objectParser,
             boolean scriptable, boolean formattable, boolean timezoneAware) {
-        declareFields(objectParser, scriptable, formattable, timezoneAware, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        declareFields(objectParser, scriptable, formattable, timezoneAware, ValueType.NUMERIC);
     }
 
     public static void declareBytesFields(
             ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?>, QueryParseContext> objectParser,
             boolean scriptable, boolean formattable) {
-        declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.BYTES, ValueType.STRING);
+        declareFields(objectParser, scriptable, formattable, false, ValueType.STRING);
     }
 
     public static void declareGeoFields(
             ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, ?>, QueryParseContext> objectParser,
             boolean scriptable, boolean formattable) {
-        declareFields(objectParser, scriptable, formattable, false, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        declareFields(objectParser, scriptable, formattable, false, ValueType.GEOPOINT);
     }
 
     private static <VS extends ValuesSource> void declareFields(
             ObjectParser<? extends ValuesSourceAggregationBuilder<VS, ?>, QueryParseContext> objectParser,
-            boolean scriptable, boolean formattable, boolean timezoneAware, ValuesSourceType valuesSourceType, ValueType targetValueType) {
+            boolean scriptable, boolean formattable, boolean timezoneAware, ValueType targetValueType) {
 
 
         objectParser.declareField(ValuesSourceAggregationBuilder::field, XContentParser::text,
@@ -84,7 +84,8 @@ public final class ValuesSourceParserHelper {
         }
 
         if (scriptable) {
-            objectParser.declareField(ValuesSourceAggregationBuilder::script, org.elasticsearch.script.Script::parse,
+            objectParser.declareField(ValuesSourceAggregationBuilder::script,
+                    (parser, context) -> Script.parse(parser, context.getDefaultScriptLanguage()),
                     Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
         }
 
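The method reference Script::parse no longer fits once the matcher overload is gone, so the helper supplies the request's default script language through a lambda instead. A hedged restatement of the pattern, assuming an ObjectParser keyed on QueryParseContext as above:

    // ContextParser-style lambda: the second argument is the parse context,
    // which carries the default script language for this request.
    objectParser.declareField(
            ValuesSourceAggregationBuilder::script,       // setter on the builder
            (parser, context) -> Script.parse(parser, context.getDefaultScriptLanguage()),
            Script.SCRIPT_PARSE_FIELD,                    // the "script" field name
            ObjectParser.ValueType.OBJECT_OR_STRING);     // accepts "..." or {...}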
@@ -948,7 +948,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
             } else if (TRACK_SCORES_FIELD.match(currentFieldName)) {
                 trackScores = parser.booleanValue();
             } else if (_SOURCE_FIELD.match(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.parse(context.parser());
+                fetchSourceContext = FetchSourceContext.fromXContent(context.parser());
             } else if (STORED_FIELDS_FIELD.match(currentFieldName)) {
                 storedFieldsContext =
                     StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
@@ -970,7 +970,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
             } else if (POST_FILTER_FIELD.match(currentFieldName)) {
                 postQueryBuilder = context.parseInnerQueryBuilder();
             } else if (_SOURCE_FIELD.match(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.parse(context.parser());
+                fetchSourceContext = FetchSourceContext.fromXContent(context.parser());
             } else if (SCRIPT_FIELDS_FIELD.match(currentFieldName)) {
                 scriptFields = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -1059,9 +1059,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                     }
                 }
             } else if (_SOURCE_FIELD.match(currentFieldName)) {
-                fetchSourceContext = FetchSourceContext.parse(context.parser());
+                fetchSourceContext = FetchSourceContext.fromXContent(context.parser());
             } else if (SEARCH_AFTER.match(currentFieldName)) {
-                searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher());
+                searchAfterBuilder = SearchAfterBuilder.fromXContent(parser);
             } else if (FIELDS_FIELD.match(currentFieldName)) {
                 throw new ParsingException(parser.getTokenLocation(), "The field [" +
                     SearchSourceBuilder.FIELDS_FIELD + "] is no longer supported, please use [" +
@@ -1341,7 +1341,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                 currentFieldName = parser.currentName();
             } else if (token.isValue()) {
                 if (SCRIPT_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else if (IGNORE_FAILURE_FIELD.match(currentFieldName)) {
                     ignoreFailure = parser.booleanValue();
                 } else {
@@ -1350,7 +1350,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (SCRIPT_FIELD.match(currentFieldName)) {
-                    script = Script.parse(parser, context.getParseFieldMatcher(), context.getDefaultScriptLanguage());
+                    script = Script.parse(parser, context.getDefaultScriptLanguage());
                 } else {
                     throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName
                         + "].", parser.getTokenLocation());
@@ -21,7 +21,6 @@ package org.elasticsearch.search.fetch.subphase;
 
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -55,10 +54,6 @@ public class FetchSourceContext implements Writeable, ToXContent {
     private final String[] excludes;
     private Function<Map<String, ?>, Map<String, Object>> filter;
 
-    public static FetchSourceContext parse(XContentParser parser) throws IOException {
-        return fromXContent(parser, ParseFieldMatcher.STRICT);
-    }
-
     public FetchSourceContext(boolean fetchSource, String[] includes, String[] excludes) {
         this.fetchSource = fetchSource;
         this.includes = includes == null ? Strings.EMPTY_ARRAY : includes;
@@ -127,7 +122,7 @@ public class FetchSourceContext implements Writeable, ToXContent {
         return null;
     }
 
-    public static FetchSourceContext fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
+    public static FetchSourceContext fromXContent(XContentParser parser) throws IOException {
         XContentParser.Token token = parser.currentToken();
         boolean fetchSource = true;
         String[] includes = Strings.EMPTY_ARRAY;
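With the strict/lenient split gone, the static parse wrapper collapses into a single entry point. A minimal sketch of the remaining call, assuming a parser already positioned on the value of "_source", exactly as the SearchSourceBuilder call sites above leave it:

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

    // One-argument form; strictness is no longer a caller decision.
    FetchSourceContext fetchSourceContext = FetchSourceContext.fromXContent(parser);
    // The result reflects {"includes": [...], "excludes": [...]} or the boolean shorthand.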
@@ -25,7 +25,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.action.search.SearchTask;
 import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -23,8 +23,6 @@ import org.apache.lucene.search.Explanation;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressorFactory;
@@ -726,7 +724,7 @@ public class InternalSearchHit implements SearchHit {
             return builder;
         }
 
-        private static final ConstructingObjectParser<InternalNestedIdentity, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
+        private static final ConstructingObjectParser<InternalNestedIdentity, Void> PARSER = new ConstructingObjectParser<>(
                 "nested_identity",
                 ctorArgs -> new InternalNestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (InternalNestedIdentity) ctorArgs[2]));
         static {
@@ -736,7 +734,7 @@ public class InternalSearchHit implements SearchHit {
         }
 
         public static InternalNestedIdentity fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, () -> ParseFieldMatcher.EMPTY);
+            return PARSER.apply(parser, null);
         }
 
         @Override
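Parsers that carried ParseFieldMatcherSupplier only to satisfy the context type parameter migrate to Void, and call sites pass null. A self-contained sketch of the pattern with a hypothetical two-field value class, assuming the ConstructingObjectParser API of this tree:

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ConstructingObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

    // Hypothetical value class, for illustration only.
    class FieldOffset {
        final String field;
        final int offset;
        FieldOffset(String field, int offset) {
            this.field = field;
            this.offset = offset;
        }

        // Void context: this parser needs nothing per-request, so callers pass null.
        static final ConstructingObjectParser<FieldOffset, Void> PARSER = new ConstructingObjectParser<>(
                "field_offset", args -> new FieldOffset((String) args[0], (int) args[1]));
        static {
            PARSER.declareString(constructorArg(), new ParseField("field"));
            PARSER.declareInt(constructorArg(), new ParseField("offset"));
        }

        static FieldOffset fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }
    }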
@@ -23,7 +23,6 @@ import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -202,7 +201,7 @@ public class SearchAfterBuilder implements ToXContent, Writeable {
         builder.array(SEARCH_AFTER.getPreferredName(), sortValues);
     }
 
-    public static SearchAfterBuilder fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
+    public static SearchAfterBuilder fromXContent(XContentParser parser) throws IOException {
         SearchAfterBuilder builder = new SearchAfterBuilder();
         XContentParser.Token token = parser.currentToken();
         List<Object> values = new ArrayList<> ();
@@ -29,7 +29,6 @@ import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
@@ -398,7 +397,6 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
      */
     public static GeoDistanceSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
         XContentParser parser = context.parser();
-        ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
         String fieldName = null;
         List<GeoPoint> geoPoints = new ArrayList<>();
         DistanceUnit unit = DistanceUnit.DEFAULT;
@@ -216,7 +216,8 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
             a -> new ScriptSortBuilder((Script) a[0], (ScriptSortType) a[1]));
 
     static {
-        PARSER.declareField(constructorArg(), Script::parse, Script.SCRIPT_PARSE_FIELD, ValueType.OBJECT_OR_STRING);
+        PARSER.declareField(constructorArg(), (parser, context) -> Script.parse(parser, context.getDefaultScriptLanguage()),
+                Script.SCRIPT_PARSE_FIELD, ValueType.OBJECT_OR_STRING);
         PARSER.declareField(constructorArg(), p -> ScriptSortType.fromString(p.text()), TYPE_FIELD, ValueType.STRING);
         PARSER.declareString((b, v) -> b.order(SortOrder.fromString(v)), ORDER_FIELD);
         PARSER.declareString((b, v) -> b.sortMode(SortMode.fromString(v)), SORTMODE_FIELD);
@@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest;
 import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -140,7 +139,6 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable {
 
     public static SuggestBuilder fromXContent(QueryParseContext parseContext, Suggesters suggesters) throws IOException {
         XContentParser parser = parseContext.parser();
-        ParseFieldMatcher parseFieldMatcher = parseContext.getParseFieldMatcher();
         SuggestBuilder suggestBuilder = new SuggestBuilder();
         String fieldName = null;
 
@@ -22,7 +22,6 @@ package org.elasticsearch.search.suggest;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -257,7 +256,6 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> implemen
     static SuggestionBuilder<?> fromXContent(QueryParseContext parseContext, Suggesters suggesters)
             throws IOException {
         XContentParser parser = parseContext.parser();
-        ParseFieldMatcher parsefieldMatcher = parseContext.getParseFieldMatcher();
         XContentParser.Token token;
         String currentFieldName = null;
         String suggestText = null;
@@ -21,8 +21,6 @@ package org.elasticsearch.search.suggest.completion.context;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -98,7 +96,7 @@ public final class CategoryQueryContext implements ToXContent {
         return result;
     }
 
-    private static ObjectParser<Builder, ParseFieldMatcherSupplier> CATEGORY_PARSER = new ObjectParser<>(NAME, null);
+    private static ObjectParser<Builder, Void> CATEGORY_PARSER = new ObjectParser<>(NAME, null);
     static {
         CATEGORY_PARSER.declareString(Builder::setCategory, new ParseField(CONTEXT_VALUE));
         CATEGORY_PARSER.declareInt(Builder::setBoost, new ParseField(CONTEXT_BOOST));
@@ -110,7 +108,7 @@ public final class CategoryQueryContext implements ToXContent {
         XContentParser.Token token = parser.currentToken();
         Builder builder = builder();
         if (token == XContentParser.Token.START_OBJECT) {
-            CATEGORY_PARSER.parse(parser, builder, () -> ParseFieldMatcher.STRICT);
+            CATEGORY_PARSER.parse(parser, builder, null);
         } else if (token == XContentParser.Token.VALUE_STRING) {
             builder.setCategory(parser.text());
         } else {
@@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -138,7 +137,6 @@ public final class LinearInterpolation extends SmoothingModel {
         double trigramLambda = 0.0;
         double bigramLambda = 0.0;
         double unigramLambda = 0.0;
-        ParseFieldMatcher matcher = parseContext.getParseFieldMatcher();
         while ((token = parser.nextToken()) != Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 fieldName = parser.currentName();
@@ -22,7 +22,6 @@ package org.elasticsearch.search.suggest.phrase;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -491,7 +490,6 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
     static PhraseSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         PhraseSuggestionBuilder tmpSuggestion = new PhraseSuggestionBuilder("_na_");
-        ParseFieldMatcher parseFieldMatcher = parseContext.getParseFieldMatcher();
         XContentParser.Token token;
         String currentFieldName = null;
         String fieldname = null;
@@ -567,7 +565,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
                             "suggester[phrase][collate] query already set, doesn't support additional ["
                             + currentFieldName + "]");
                     }
-                    Script template = Script.parse(parser, parseFieldMatcher, "mustache");
+                    Script template = Script.parse(parser, "mustache");
                     tmpSuggestion.collateQuery(template);
                 } else if (PhraseSuggestionBuilder.COLLATE_QUERY_PARAMS.match(currentFieldName)) {
                     tmpSuggestion.collateParams(parser.map());
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.suggest.phrase;
 
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -67,7 +66,6 @@ public abstract class SmoothingModel implements NamedWriteable, ToXContent {
 
     public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
-        ParseFieldMatcher parseFieldMatcher = parseContext.getParseFieldMatcher();
         XContentParser.Token token;
         String fieldName = null;
         SmoothingModel model = null;
@@ -26,7 +26,6 @@ import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
 import org.apache.lucene.search.spell.NGramDistance;
 import org.apache.lucene.search.spell.StringDistance;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -391,7 +390,6 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
     static TermSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         TermSuggestionBuilder tmpSuggestion = new TermSuggestionBuilder("_na_");
-        ParseFieldMatcher parseFieldMatcher = parseContext.getParseFieldMatcher();
         XContentParser.Token token;
         String currentFieldName = null;
         String fieldname = null;
@@ -21,7 +21,6 @@ package org.elasticsearch.snapshots;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ShardOperationFailedException;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -36,8 +36,8 @@ public class LiveVersionMapTests extends ESTestCase {
         }
         long actualRamBytesUsed = RamUsageTester.sizeOf(map);
         long estimatedRamBytesUsed = map.ramBytesUsed();
-        // less than 25% off
-        assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 4);
+        // less than 50% off
+        assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 2);
 
         // now refresh
         map.beforeRefresh();
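For context on the relaxed assertion: the three-argument assertEquals treats the last value as an absolute tolerance, so the estimate may now drift up to half of the measured size before the test fails. A hedged restatement of the semantics:

    // assertEquals(expected, actual, delta) passes iff |expected - actual| <= delta.
    // Before: delta = actualRamBytesUsed / 4 (estimate within 25% of measured).
    // After:  delta = actualRamBytesUsed / 2 (estimate within 50% of measured).
    assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 2);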
@@ -724,14 +724,14 @@ public class IndexRecoveryIT extends ESIntegTestCase {
             }
         });
 
-        final AtomicBoolean seenWaitForClusterState = new AtomicBoolean();
+        final AtomicBoolean finalized = new AtomicBoolean();
         blueMockTransportService.addDelegate(redMockTransportService, new MockTransportService.DelegateTransport(blueMockTransportService.original()) {
             @Override
             protected void sendRequest(Connection connection, long requestId, String action, TransportRequest request,
                                        TransportRequestOptions options) throws IOException {
                 logger.info("--> sending request {} on {}", action, connection.getNode());
-                if (action.equals(PeerRecoveryTargetService.Actions.WAIT_CLUSTERSTATE)) {
-                    seenWaitForClusterState.set(true);
+                if (action.equals(PeerRecoveryTargetService.Actions.FINALIZE)) {
+                    finalized.set(true);
                 }
                 super.sendRequest(connection, requestId, action, request, options);
             }
@@ -743,7 +743,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
             protected void sendRequest(Connection connection, long requestId, String action, TransportRequest request,
                                        TransportRequestOptions options) throws IOException {
                 logger.info("--> sending request {} on {}", action, connection.getNode());
-                if (primaryRelocation == false || seenWaitForClusterState.get() == false) {
+                if ((primaryRelocation && finalized.get()) == false) {
                     assertNotEquals(action, ShardStateAction.SHARD_FAILED_ACTION_NAME);
                 }
                 super.sendRequest(connection, requestId, action, request, options);
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.script;
 
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -45,7 +44,7 @@ public class ScriptTests extends ESTestCase {
         try (XContentBuilder builder = XContentBuilder.builder(xContent)) {
             expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS);
             try (XContentParser parser = createParser(builder)) {
-                Script actualScript = Script.parse(parser, ParseFieldMatcher.STRICT);
+                Script actualScript = Script.parse(parser);
                 assertThat(actualScript, equalTo(expectedScript));
             }
         }
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.pipeline.moving.avg;
 
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.collect.EvictingQueue;
 import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel;
 import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel;
@@ -605,7 +604,7 @@ public class MovAvgUnitTests extends ESTestCase {
             settings.put("alpha", v);
 
             try {
-                parser.parse(settings, "pipeline", 10, ParseFieldMatcher.STRICT);
+                parser.parse(settings, "pipeline", 10);
             } catch (ParseException e) {
                 fail(parser + " parser should not have thrown SearchParseException while parsing [" +
                     v.getClass().getSimpleName() +"]");
@@ -619,7 +618,7 @@ public class MovAvgUnitTests extends ESTestCase {
             settings.put("gamma", "abc");
 
             try {
-                parser.parse(settings, "pipeline", 10, ParseFieldMatcher.STRICT);
+                parser.parse(settings, "pipeline", 10);
             } catch (ParseException e) {
                 //all good
                 continue;
@@ -133,7 +133,7 @@ public class SearchAfterBuilderTests extends ESTestCase {
         parser.nextToken();
         parser.nextToken();
         parser.nextToken();
-        return SearchAfterBuilder.fromXContent(parser, null);
+        return SearchAfterBuilder.fromXContent(parser);
     }
 
     private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException {
@@ -172,7 +172,7 @@ public class SearchAfterBuilderTests extends ESTestCase {
             parser.nextToken();
             parser.nextToken();
             parser.nextToken();
-            SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser, null);
+            SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser);
             assertNotSame(searchAfterBuilder, secondSearchAfterBuilder);
             assertEquals(searchAfterBuilder, secondSearchAfterBuilder);
             assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode());
@@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -142,7 +141,6 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
 
     static CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
-        ParseFieldMatcher parseFieldMatcher = parseContext.getParseFieldMatcher();
         XContentParser.Token token;
         String currentFieldName = null;
        String fieldname = null;
@@ -104,6 +104,7 @@ subprojects {
   /*****************************************************************************
    *                           Rest test config                                *
    *****************************************************************************/
+  apply plugin: 'elasticsearch.standalone-rest-test'
   apply plugin: 'elasticsearch.rest-test'
   project.integTest {
     dependsOn project.assemble
@@ -156,9 +156,9 @@ PUT /transactions/stock/_bulk?refresh
 {"type": "sale","amount": 80}
 {"index":{"_id":2}}
 {"type": "cost","amount": 10}
-{"index":{"_id":2}}
+{"index":{"_id":3}}
 {"type": "cost","amount": 30}
-{"index":{"_id":2}}
+{"index":{"_id":4}}
 {"type": "sale","amount": 130}
 --------------------------------------------------
 // CONSOLE
@@ -4,7 +4,8 @@
 By default, when a previously unseen field is found in a document,
 Elasticsearch will add the new field to the type mapping. This behaviour can
 be disabled, both at the document and at the <<object,`object`>> level, by
-setting the <<dynamic,`dynamic`>> parameter to `false` or to `strict`.
+setting the <<dynamic,`dynamic`>> parameter to `false` (to ignore new fields) or to `strict` (to throw
+an exception if an unknown field is encountered).
 
 Assuming `dynamic` field mapping is enabled, some simple rules are used to
 determine which datatype the field should have:
@@ -19,7 +19,6 @@
 package org.elasticsearch.search.aggregations.matrix.stats;
 
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.aggregations.support.MultiValuesSourceParser.NumericValuesSourceParser;
@@ -39,7 +38,7 @@ public class MatrixStatsParser extends NumericValuesSourceParser {
 
     @Override
     protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
-                            ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
+                            Map<ParseField, Object> otherOptions) throws IOException {
         if (MULTIVALUE_MODE_FIELD.match(currentFieldName)) {
             if (token == XContentParser.Token.VALUE_STRING) {
                 otherOptions.put(MULTIVALUE_MODE_FIELD, parser.text());
@ -20,13 +20,12 @@
|
||||||
package org.elasticsearch.search.aggregations.support;
|
package org.elasticsearch.search.aggregations.support;
|
||||||
|
|
||||||
import org.elasticsearch.common.ParseField;
|
import org.elasticsearch.common.ParseField;
|
||||||
import org.elasticsearch.common.ParseFieldMatcher;
|
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.query.QueryParseContext;
|
import org.elasticsearch.index.query.QueryParseContext;
|
||||||
import org.elasticsearch.script.Script;
|
import org.elasticsearch.script.Script;
|
||||||
import org.elasticsearch.search.aggregations.Aggregator;
|
|
||||||
import org.elasticsearch.search.aggregations.AggregationBuilder.CommonFields;
|
import org.elasticsearch.search.aggregations.AggregationBuilder.CommonFields;
|
||||||
|
import org.elasticsearch.search.aggregations.Aggregator;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
@ -85,8 +84,6 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
|
||||||
String format = null;
|
String format = null;
|
||||||
Map<String, Object> missingMap = null;
|
Map<String, Object> missingMap = null;
|
||||||
Map<ParseField, Object> otherOptions = new HashMap<>();
|
Map<ParseField, Object> otherOptions = new HashMap<>();
|
||||||
final ParseFieldMatcher parseFieldMatcher = context.getParseFieldMatcher();
|
|
||||||
|
|
||||||
XContentParser.Token token;
|
XContentParser.Token token;
|
||||||
String currentFieldName = null;
|
String currentFieldName = null;
|
||||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||||
|
@ -101,7 +98,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
|
||||||
throw new ParsingException(parser.getTokenLocation(),
|
throw new ParsingException(parser.getTokenLocation(),
|
||||||
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
|
"Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
|
||||||
"Multi-field aggregations do not support scripts.");
|
"Multi-field aggregations do not support scripts.");
|
||||||
-            } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
+            } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
                 throw new ParsingException(parser.getTokenLocation(),
                         "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
             }

@@ -116,7 +113,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
                         "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
                                 "Multi-field aggregations do not support scripts.");

-            } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
+            } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
                 throw new ParsingException(parser.getTokenLocation(),
                         "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
             }

@@ -135,11 +132,11 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
                             "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                 }
             }
-        } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
+        } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
             throw new ParsingException(parser.getTokenLocation(),
                     "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
         }
-        } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(), otherOptions)) {
+        } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
             throw new ParsingException(parser.getTokenLocation(),
                     "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
         }

@@ -198,8 +195,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
      *            the target type of the final value output by the aggregation
      * @param otherOptions
      *            a {@link Map} containing the extra options parsed by the
-     *            {@link #token(String, String, org.elasticsearch.common.xcontent.XContentParser.Token,
-     *            XContentParser, ParseFieldMatcher, Map)}
+     *            {@link #token(String, String, XContentParser.Token, XContentParser, Map)}
      *            method
      * @return the created factory
      */

@@ -219,8 +215,6 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
      *            the current token for the parser
      * @param parser
      *            the parser
-     * @param parseFieldMatcher
-     *            the {@link ParseFieldMatcher} to use to match field names
      * @param otherOptions
      *            a {@link Map} of options to be populated by successive calls
      *            to this method which will then be passed to the

@@ -232,5 +226,5 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
      *             if an error occurs whilst parsing
      */
     protected abstract boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
-            ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException;
+            Map<ParseField, Object> otherOptions) throws IOException;
 }
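Each branch above makes the same mechanical change: the ParseFieldMatcher argument is dropped from the token(...) callback, so field names are matched against the ParseField directly. A minimal sketch of a concrete override under the new five-argument signature — the class and the "precision" field are invented for illustration, and it assumes the matcher-free ParseField#match(String) that this refactoring uses elsewhere:

    import java.io.IOException;
    import java.util.Map;

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.XContentParser;

    // Hypothetical parser subclass, shown only to illustrate the new callback shape.
    public class ExampleValuesSourceParser {

        private static final ParseField PRECISION_FIELD = new ParseField("precision");

        protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token,
                XContentParser parser, Map<ParseField, Object> otherOptions) throws IOException {
            // Match the field name against the ParseField itself; no matcher is threaded through.
            if (token == XContentParser.Token.VALUE_NUMBER && PRECISION_FIELD.match(currentFieldName)) {
                otherOptions.put(PRECISION_FIELD, parser.intValue());
                return true;  // token handled
            }
            return false;     // unhandled; the caller raises the ParsingException seen above
        }
    }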
@@ -23,8 +23,6 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;

@@ -47,7 +45,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;

 public class RestSearchTemplateAction extends BaseRestHandler {

-    private static final ObjectParser<SearchTemplateRequest, ParseFieldMatcherSupplier> PARSER;
+    private static final ObjectParser<SearchTemplateRequest, Void> PARSER;
     static {
         PARSER = new ObjectParser<>("search_template");
         PARSER.declareField((parser, request, s) ->

@@ -105,7 +103,7 @@ public class RestSearchTemplateAction extends BaseRestHandler {
         // Creates the search template request
         SearchTemplateRequest searchTemplateRequest;
         try (XContentParser parser = request.contentOrSourceParamParser()) {
-            searchTemplateRequest = PARSER.parse(parser, new SearchTemplateRequest(), () -> ParseFieldMatcher.EMPTY);
+            searchTemplateRequest = PARSER.parse(parser, new SearchTemplateRequest(), null);
         }
         searchTemplateRequest.setRequest(searchRequest);

@@ -113,6 +111,6 @@ public class RestSearchTemplateAction extends BaseRestHandler {
     }

     public static SearchTemplateRequest parse(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, new SearchTemplateRequest(), () -> ParseFieldMatcher.EMPTY);
+        return PARSER.parse(parser, new SearchTemplateRequest(), null);
     }
 }
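With the matcher gone, the parser no longer needs a ParseFieldMatcherSupplier context, so the ObjectParser's context type parameter collapses to Void and call sites pass null. A minimal sketch of the same pattern under that assumption — TemplateHolder and its "template" field are invented for illustration:

    import java.io.IOException;

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class TemplateHolder {
        String template;

        static final ObjectParser<TemplateHolder, Void> PARSER = new ObjectParser<>("example");
        static {
            PARSER.declareString((holder, value) -> holder.template = value, new ParseField("template"));
        }

        // Mirrors the static parse(...) helper above: no matcher state, so the context is null.
        static TemplateHolder parse(XContentParser parser) throws IOException {
            return PARSER.parse(parser, new TemplateHolder(), null);
        }
    }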
@@ -137,7 +137,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
      */
     public static TemplateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
-        Script template = Script.parse(parser, parseContext.getParseFieldMatcher(), "mustache");
+        Script template = Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG);
         return new TemplateQueryBuilder(template);
     }
 }
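Script.parse loses its matcher argument as well, leaving a one-argument form that uses the default script language and a two-argument form that takes a default language, so fromXContent can name Script.DEFAULT_TEMPLATE_LANG instead of the "mustache" literal. A sketch of the two call shapes, assuming a parser positioned at a script object:

    import java.io.IOException;

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.script.Script;

    class ScriptParseSketch {
        // One-argument form: the default script language applies.
        static Script parseScript(XContentParser parser) throws IOException {
            return Script.parse(parser);
        }

        // Two-argument form: the caller supplies the default language, as fromXContent
        // above now does with Script.DEFAULT_TEMPLATE_LANG.
        static Script parseTemplate(XContentParser parser) throws IOException {
            return Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG);
        }
    }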
@@ -19,8 +19,6 @@
 package org.elasticsearch.script.mustache;

 import com.github.mustachejava.MustacheFactory;
-
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -87,7 +85,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
                 + "\"params\":{\"template\":\"all\"}"
                 + "}";
         XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
-        Script script = Script.parse(parser, ParseFieldMatcher.EMPTY);
+        Script script = Script.parse(parser);
         CompiledScript compiledScript = new CompiledScript(ScriptType.INLINE, null, "mustache",
                 qe.compile(null, script.getIdOrCode(), Collections.emptyMap()));
         ExecutableScript executableScript = qe.executable(compiledScript, script.getParams());

@@ -103,7 +101,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
                 + " }"
                 + "}";
         XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
-        Script script = Script.parse(parser, ParseFieldMatcher.EMPTY);
+        Script script = Script.parse(parser);
         CompiledScript compiledScript = new CompiledScript(ScriptType.INLINE, null, "mustache",
                 qe.compile(null, script.getIdOrCode(), Collections.emptyMap()));
         ExecutableScript executableScript = qe.executable(compiledScript, script.getParams());
@@ -32,7 +32,6 @@ import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;

@@ -53,7 +52,6 @@ import java.util.Map;
 public class TransportMultiPercolateAction extends HandledTransportAction<MultiPercolateRequest, MultiPercolateResponse> {

     private final Client client;
-    private final ParseFieldMatcher parseFieldMatcher;
     private final SearchRequestParsers searchRequestParsers;
     private final NamedXContentRegistry xContentRegistry;

@@ -66,7 +64,6 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
         this.client = client;
         this.searchRequestParsers = searchRequestParsers;
         this.xContentRegistry = xContentRegistry;
-        this.parseFieldMatcher = new ParseFieldMatcher(settings);
     }

     @Override
@@ -63,7 +63,6 @@ import java.util.List;
 public class TransportPercolateAction extends HandledTransportAction<PercolateRequest, PercolateResponse> {

     private final Client client;
-    private final ParseFieldMatcher parseFieldMatcher;
     private final SearchRequestParsers searchRequestParsers;
     private final NamedXContentRegistry xContentRegistry;

@@ -76,7 +75,6 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
         this.client = client;
         this.searchRequestParsers = searchRequestParsers;
         this.xContentRegistry = xContentRegistry;
-        this.parseFieldMatcher = new ParseFieldMatcher(settings);
     }

     @Override
@@ -95,10 +95,10 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
         destParser.declareString(IndexRequest::setPipeline, new ParseField("pipeline"));
         destParser.declareString((s, i) -> s.versionType(VersionType.fromString(i)), new ParseField("version_type"));

-        PARSER.declareField((p, v, c) -> sourceParser.parse(p, v, c), new ParseField("source"), ValueType.OBJECT);
+        PARSER.declareField(sourceParser::parse, new ParseField("source"), ValueType.OBJECT);
         PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT);
         PARSER.declareInt(ReindexRequest::setSize, new ParseField("size"));
-        PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p, c.getParseFieldMatcher())), new ParseField("script"),
+        PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p)), new ParseField("script"),
                 ValueType.OBJECT);
         PARSER.declareString(ReindexRequest::setConflicts, new ParseField("conflicts"));
     }
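The "source" declaration simplifies to sourceParser::parse because its lambda only forwarded its parameters, while the "dest" declaration must stay a lambda since it retargets parsing onto v.getDestination(). A self-contained sketch of that general rule, with invented names:

    import java.util.function.BiFunction;

    class MethodRefSketch {
        static int combine(String s, int i) {
            return s.length() + i;
        }

        public static void main(String[] args) {
            // A lambda that merely forwards its parameter list...
            BiFunction<String, Integer, Integer> viaLambda = (s, i) -> combine(s, i);
            // ...is interchangeable with a method reference to the same method.
            BiFunction<String, Integer, Integer> viaRef = MethodRefSketch::combine;
            System.out.println(viaLambda.apply("abc", 1) + " == " + viaRef.apply("abc", 1));
        }
    }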
@@ -1,3 +1,23 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 /* This project runs the core REST tests against a 2 node cluster where one of the nodes has a different minor.
@@ -23,4 +23,3 @@
  */

 apply plugin: 'elasticsearch.standalone-test'
-
@@ -17,6 +17,7 @@
  * under the License.
  */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 // TODO: this test works, but it isn't really a rest test...should we have another plugin for "non rest test that just needs N clusters?"
@@ -17,6 +17,7 @@
  * under the License.
  */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 dependencies {
@@ -22,7 +22,6 @@ package org.elasticsearch.http;
 import org.apache.http.message.BasicHeader;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.index.IndexRequest;

@@ -317,7 +316,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
         }

         @Override
-        protected boolean apply(String action, ActionRequest request, ActionListener listener) {
+        protected boolean apply(String action, ActionRequest request, ActionListener<?> listener) {
             requests.add(new RequestAndHeaders(threadPool.getThreadContext().getHeaders(), request));
             return true;
         }
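Typing the listener as ActionListener<?> rather than the raw type removes the unchecked warning without changing behavior: this filter only records the request and continues the chain, and a wildcard listener can still be stored or forwarded; it just cannot be handed a response value, which this method never does. A small sketch of that property, with invented names:

    import java.util.ArrayList;
    import java.util.List;

    import org.elasticsearch.action.ActionListener;

    class ListenerSketch {
        private final List<ActionListener<?>> seen = new ArrayList<>();

        // A wildcard listener can be recorded or forwarded; code that never calls
        // onResponse(...) needs no concrete response type.
        void observe(ActionListener<?> listener) {
            seen.add(listener);
        }
    }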
@@ -17,6 +17,7 @@
  * under the License.
  */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 dependencies {
@@ -17,6 +17,7 @@
  * under the License.
  */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 dependencies {
@@ -1,4 +1,23 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 integTest {
@@ -19,6 +19,7 @@

 import org.elasticsearch.gradle.MavenFilteringHack

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 ext.pluginsCount = 0

@@ -40,4 +41,3 @@ processTestResources {
   inputs.properties(expansions)
   MavenFilteringHack.filter(it, expansions)
 }
-
@@ -17,6 +17,7 @@
  * under the License.
  */

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 integTest {
@@ -21,6 +21,7 @@ import org.elasticsearch.gradle.test.ClusterConfiguration
 import org.elasticsearch.gradle.test.ClusterFormationTasks
 import org.elasticsearch.gradle.test.NodeInfo

+apply plugin: 'elasticsearch.standalone-rest-test'
 apply plugin: 'elasticsearch.rest-test'

 List<NodeInfo> oneNodes
@@ -7,6 +7,7 @@ List projects = [
   'core',
   'docs',
   'client:rest',
+  'client:rest-high-level',
   'client:sniffer',
   'client:transport',
   'client:test',
@@ -302,7 +302,7 @@ public class RandomSearchRequestGenerator {
             parser.nextToken();
             parser.nextToken();
             parser.nextToken();
-            builder.searchAfter(SearchAfterBuilder.fromXContent(parser, null).getSortValues());
+            builder.searchAfter(SearchAfterBuilder.fromXContent(parser).getSortValues());
         } catch (IOException e) {
             throw new RuntimeException("Error building search_from", e);
         }