mirror of https://github.com/honeymoose/OpenSearch.git
synced 2025-03-25 01:19:02 +00:00

Merge branch 'master' into settings_prototype
commit eae3da3b54

.gitignore (vendored): 47 lines changed
@@ -1,37 +1,42 @@
# intellij files
.idea/
.gradle/
*.iml
*.ipr
*.iws
work/
/data/
logs/
.DS_Store
build/
generated-resources/
**/.local*
docs/html/
docs/build.log
/tmp/
backwards/
html_docs
.vagrant/

## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects)
## All files (.project, .classpath, .settings/*) should be generated through Maven which
## will correctly set the classpath based on the declared dependencies and write settings
## files to ensure common coding style across Eclipse and IDEA.
# eclipse files
.project
.classpath
eclipse-build
.settings

## netbeans ignores
# netbeans files
nb-configuration.xml
nbactions.xml

dependency-reduced-pom.xml
# gradle stuff
.gradle/
build/
generated-resources/

# old patterns specific to maven
# maven stuff (to be removed when trunk becomes 4.x)
*-execution-hints.log
target/
dependency-reduced-pom.xml

# testing stuff
**/.local*
.vagrant/

# osx stuff
.DS_Store

# needed in case docs build is run...maybe we can configure doc build to generate files under build?
html_docs

# random old stuff that we should look at the necessity of...
/tmp/
backwards/
build.gradle: 10 lines changed

@@ -123,6 +123,16 @@ subprojects {
       }
     }
   }
+  // For reasons we don't fully understand yet, external dependencies are not picked up by Ant's optional tasks.
+  // But you can easily do it in another way.
+  // Only if your buildscript and Ant's optional task need the same library would you have to define it twice.
+  // https://docs.gradle.org/current/userguide/organizing_build_logic.html
+  configurations {
+    forbiddenApis
+  }
+  dependencies {
+    forbiddenApis 'de.thetaphi:forbiddenapis:2.0'
+  }
 }
 
 // Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
@@ -34,7 +34,8 @@ class PrecommitTasks {
         List<Task> precommitTasks = [
             configureForbiddenApis(project),
             project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
-            project.tasks.create('jarHell', JarHellTask.class)]
+            project.tasks.create('jarHell', JarHellTask.class),
+            project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)]
 
         // tasks with just tests don't need dependency licenses, so this flag makes adding
         // the task optional
@@ -0,0 +1,207 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit

import java.nio.file.Files
import java.nio.file.FileVisitResult
import java.nio.file.Path
import java.nio.file.SimpleFileVisitor
import java.nio.file.attribute.BasicFileAttributes

import org.gradle.api.DefaultTask
import org.gradle.api.artifacts.UnknownConfigurationException
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.TaskAction

import org.apache.tools.ant.BuildLogger
import org.apache.tools.ant.Project

/**
 * Basic static checking to keep tabs on third party JARs
 */
public class ThirdPartyAuditTask extends DefaultTask {

    // true to be lenient about MISSING CLASSES
    private boolean missingClasses;

    // patterns for classes to exclude, because we understand their issues
    private String[] excludes = new String[0];

    ThirdPartyAuditTask() {
        dependsOn(project.configurations.testCompile)
        description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors"
    }

    /**
     * Set to true to be lenient with missing classes. By default this check will fail if it finds
     * MISSING CLASSES. This means the set of jars is incomplete. However, in some cases
     * this can be due to intentional exclusions that are well-tested and understood.
     */
    public void setMissingClasses(boolean value) {
        missingClasses = value;
    }

    /**
     * Returns true if leniency about missing classes is enabled.
     */
    public boolean isMissingClasses() {
        return missingClasses;
    }

    /**
     * Classes that should be excluded from the scan,
     * e.g. because we know what sheisty stuff those particular classes are up to.
     */
    public void setExcludes(String[] classes) {
        for (String s : classes) {
            if (s.indexOf('*') != -1) {
                throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!")
            }
        }
        excludes = classes;
    }

    /**
     * Returns the current list of exclusions.
     */
    public String[] getExcludes() {
        return excludes;
    }

    @TaskAction
    public void check() {
        AntBuilder ant = new AntBuilder()

        // we are noisy for many reasons, working around performance problems with forbidden-apis, dealing
        // with warnings about missing classes, etc. so we use our own "quiet" AntBuilder
        ant.project.buildListeners.each { listener ->
            if (listener instanceof BuildLogger) {
                listener.messageOutputLevel = Project.MSG_ERR;
            }
        };

        // we only want third party dependencies.
        FileCollection jars = project.configurations.testCompile.fileCollection({ dependency ->
            dependency.group != "org.elasticsearch"
        })

        // we don't want provided dependencies, which we have already scanned. e.g. don't
        // scan ES core's dependencies for every single plugin
        try {
            jars -= project.configurations.getByName("provided")
        } catch (UnknownConfigurationException ignored) {}

        // no dependencies matched, we are done
        if (jars.isEmpty()) {
            return;
        }

        ant.taskdef(name:      "thirdPartyAudit",
                    classname: "de.thetaphi.forbiddenapis.ant.AntTask",
                    classpath: project.configurations.forbiddenApis.asPath)

        // print which jars we are going to scan, always
        // this is not the time to try to be succinct! Forbidden will print plenty on its own!
        Set<String> names = new HashSet<>()
        for (File jar : jars) {
            names.add(jar.getName())
        }
        logger.error("[thirdPartyAudit] Scanning: " + names)

        // warn that classes are missing
        // TODO: move these to excludes list!
        if (missingClasses) {
            logger.warn("[thirdPartyAudit] WARNING: CLASSES ARE MISSING! Expect NoClassDefFoundError in bug reports from users!")
        }

        // TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first,
        // and then remove our temp dir afterwards. don't complain: try it yourself.
        // we don't use gradle temp dir handling, just google it, or try it yourself.

        File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit')

        // clean up any previous mess (if we failed), then unzip everything to one directory
        ant.delete(dir: tmpDir.getAbsolutePath())
        tmpDir.mkdirs()
        for (File jar : jars) {
            ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath())
        }

        // convert exclusion class names to binary file names
        String[] excludedFiles = new String[excludes.length];
        for (int i = 0; i < excludes.length; i++) {
            excludedFiles[i] = excludes[i].replace('.', '/') + ".class"
            // check if the excluded file exists, if not, sure sign things are outdated
            if (! new File(tmpDir, excludedFiles[i]).exists()) {
                throw new IllegalStateException("bogus thirdPartyAudit exclusion: '" + excludes[i] + "', not found in any dependency")
            }
        }

        // jarHellReprise
        checkSheistyClasses(tmpDir.toPath(), new HashSet<>(Arrays.asList(excludedFiles)));

        ant.thirdPartyAudit(internalRuntimeForbidden: true,
                            failOnUnsupportedJava: false,
                            failOnMissingClasses: !missingClasses,
                            classpath: project.configurations.testCompile.asPath) {
            fileset(dir: tmpDir, excludes: excludedFiles.join(','))
        }
        // clean up our mess (if we succeed)
        ant.delete(dir: tmpDir.getAbsolutePath())
    }

    /**
     * check for sheisty classes: if they also exist in the extensions classloader, it's jar hell with the JDK!
     */
    private void checkSheistyClasses(Path root, Set<String> excluded) {
        // system.parent = extensions loader.
        // note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!).
        // but groovy/gradle needs to work at all first!
        ClassLoader ext = ClassLoader.getSystemClassLoader().getParent()
        assert ext != null

        Set<String> sheistySet = new TreeSet<>();
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                String entry = root.relativize(file).toString()
                if (entry.endsWith(".class")) {
                    if (ext.getResource(entry) != null) {
                        sheistySet.add(entry);
                    }
                }
                return FileVisitResult.CONTINUE;
            }
        });

        // check if we are ok
        if (sheistySet.isEmpty()) {
            return;
        }

        // leniency against exclusions list
        sheistySet.removeAll(excluded);

        if (sheistySet.isEmpty()) {
            logger.warn("[thirdPartyAudit] WARNING: JAR HELL WITH JDK! Expect insanely hard-to-debug problems!")
        } else {
            throw new IllegalStateException("JAR HELL WITH JDK! " + sheistySet);
        }
    }
}
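For reference, exclusion names configured in a build script (such as the build.gradle hunk that follows) are converted to .class file paths before the scan, as in the loop in check() above. A standalone sketch of that mapping in plain Java (class and variable names here are hypothetical, not part of the commit):

// Replays the exclusion-name-to-file conversion used by ThirdPartyAuditTask.check().
public class ExclusionNameDemo {
    public static void main(String[] args) {
        String exclude = "org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator";
        // same logic as excludes[i].replace('.', '/') + ".class" above
        String excludedFile = exclude.replace('.', '/') + ".class";
        // prints: org/jboss/netty/handler/ssl/util/OpenJdkSelfSignedCertGenerator.class
        System.out.println(excludedFile);
    }
}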
@@ -111,6 +111,14 @@ forbiddenPatterns {
     exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt'
 }
 
+// classes are missing, e.g. org.jboss.marshalling.Marshaller
+thirdPartyAudit.missingClasses = true
+// uses internal sun ssl classes!
+thirdPartyAudit.excludes = [
+    // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name)
+    'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator',
+]
+
 // dependency licenses are currently checked in distribution
 dependencyLicenses.enabled = false
@@ -33,6 +33,8 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Collections;
+
 public class TransportRenderSearchTemplateAction extends HandledTransportAction<RenderSearchTemplateRequest, RenderSearchTemplateResponse> {
 
     private final ScriptService scriptService;
@@ -55,7 +57,7 @@ public class TransportRenderSearchTemplateAction extends HandledTransportAction<
 
             @Override
             protected void doRun() throws Exception {
-                ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request);
+                ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request, Collections.emptyMap());
                 BytesReference processedTemplate = (BytesReference) executable.run();
                 RenderSearchTemplateResponse response = new RenderSearchTemplateResponse();
                 response.source(processedTemplate);
@@ -0,0 +1,203 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.action.bulk;

import org.elasticsearch.common.unit.TimeValue;

import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 * Provides a backoff policy for bulk requests. Whenever a bulk request is rejected due to resource constraints (i.e. the client's internal
 * thread pool is full), the backoff policy decides how long the bulk processor will wait before the operation is retried internally.
 *
 * Notes for implementing custom subclasses:
 *
 * The underlying mathematical principle of <code>BackoffPolicy</code> is progressions, which can be either finite or infinite, although
 * the latter should not be used for retrying. A progression can be mapped to a <code>java.util.Iterator</code> with the following
 * semantics:
 *
 * <ul>
 * <li><code>#hasNext()</code> determines whether the progression has more elements. Return <code>true</code> for infinite progressions</li>
 * <li><code>#next()</code> determines the next element in the progression, i.e. the next wait time period</li>
 * </ul>
 *
 * Note that backoff policies are exposed as <code>Iterables</code> in order to be consumed multiple times.
 */
public abstract class BackoffPolicy implements Iterable<TimeValue> {
    private static final BackoffPolicy NO_BACKOFF = new NoBackoff();

    /**
     * Creates a backoff policy that will not allow any backoff, i.e. an operation will fail after the first attempt.
     *
     * @return A backoff policy without any backoff period. The returned instance is thread safe.
     */
    public static BackoffPolicy noBackoff() {
        return NO_BACKOFF;
    }

    /**
     * Creates a new constant backoff policy with the provided configuration.
     *
     * @param delay              The delay defines how long to wait between retry attempts. Must not be null.
     *                           Must be <= <code>Integer.MAX_VALUE</code> ms.
     * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number.
     * @return A backoff policy with a constant wait time between retries. The returned instance is thread safe but each
     * iterator created from it should only be used by a single thread.
     */
    public static BackoffPolicy constantBackoff(TimeValue delay, int maxNumberOfRetries) {
        return new ConstantBackoff(checkDelay(delay), maxNumberOfRetries);
    }

    /**
     * Creates a new exponential backoff policy with a default configuration of 50 ms initial wait period and 8 retries taking
     * roughly 5.1 seconds in total.
     *
     * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each
     * iterator created from it should only be used by a single thread.
     */
    public static BackoffPolicy exponentialBackoff() {
        return exponentialBackoff(TimeValue.timeValueMillis(50), 8);
    }

    /**
     * Creates a new exponential backoff policy with the provided configuration.
     *
     * @param initialDelay       The initial delay defines how long to wait for the first retry attempt. Must not be null.
     *                           Must be <= <code>Integer.MAX_VALUE</code> ms.
     * @param maxNumberOfRetries The maximum number of retries. Must be a non-negative number.
     * @return A backoff policy with an exponential increase in wait time for retries. The returned instance is thread safe but each
     * iterator created from it should only be used by a single thread.
     */
    public static BackoffPolicy exponentialBackoff(TimeValue initialDelay, int maxNumberOfRetries) {
        return new ExponentialBackoff((int) checkDelay(initialDelay).millis(), maxNumberOfRetries);
    }

    private static TimeValue checkDelay(TimeValue delay) {
        if (delay.millis() > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("delay must be <= " + Integer.MAX_VALUE + " ms");
        }
        return delay;
    }

    private static class NoBackoff extends BackoffPolicy {
        @Override
        public Iterator<TimeValue> iterator() {
            return new Iterator<TimeValue>() {
                @Override
                public boolean hasNext() {
                    return false;
                }

                @Override
                public TimeValue next() {
                    throw new NoSuchElementException("No backoff");
                }
            };
        }
    }

    private static class ExponentialBackoff extends BackoffPolicy {
        private final int start;

        private final int numberOfElements;

        private ExponentialBackoff(int start, int numberOfElements) {
            assert start >= 0;
            assert numberOfElements >= 0;
            this.start = start;
            this.numberOfElements = numberOfElements;
        }

        @Override
        public Iterator<TimeValue> iterator() {
            return new ExponentialBackoffIterator(start, numberOfElements);
        }
    }

    private static class ExponentialBackoffIterator implements Iterator<TimeValue> {
        private final int numberOfElements;

        private final int start;

        private int currentlyConsumed;

        private ExponentialBackoffIterator(int start, int numberOfElements) {
            this.start = start;
            this.numberOfElements = numberOfElements;
        }

        @Override
        public boolean hasNext() {
            return currentlyConsumed < numberOfElements;
        }

        @Override
        public TimeValue next() {
            if (!hasNext()) {
                throw new NoSuchElementException("Only up to " + numberOfElements + " elements");
            }
            int result = start + 10 * ((int) Math.exp(0.8d * (currentlyConsumed)) - 1);
            currentlyConsumed++;
            return TimeValue.timeValueMillis(result);
        }
    }

    private static final class ConstantBackoff extends BackoffPolicy {
        private final TimeValue delay;

        private final int numberOfElements;

        public ConstantBackoff(TimeValue delay, int numberOfElements) {
            assert numberOfElements >= 0;
            this.delay = delay;
            this.numberOfElements = numberOfElements;
        }

        @Override
        public Iterator<TimeValue> iterator() {
            return new ConstantBackoffIterator(delay, numberOfElements);
        }
    }

    private static final class ConstantBackoffIterator implements Iterator<TimeValue> {
        private final TimeValue delay;
        private final int numberOfElements;
        private int curr;

        public ConstantBackoffIterator(TimeValue delay, int numberOfElements) {
            this.delay = delay;
            this.numberOfElements = numberOfElements;
        }

        @Override
        public boolean hasNext() {
            return curr < numberOfElements;
        }

        @Override
        public TimeValue next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            curr++;
            return delay;
        }
    }
}
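The "roughly 5.1 seconds in total" figure for the default policy follows directly from the formula in ExponentialBackoffIterator#next(). A standalone sketch in plain Java (no Elasticsearch types; class name is hypothetical) that replays the progression:

// Reproduces the delay progression of the default exponential policy
// (start = 50 ms, 8 retries) to check the total wait time.
public class ExponentialBackoffDemo {
    public static void main(String[] args) {
        int start = 50;            // initial delay in ms, as in exponentialBackoff()
        int numberOfElements = 8;  // maximum number of retries
        long total = 0;
        for (int k = 0; k < numberOfElements; k++) {
            // same formula as ExponentialBackoffIterator#next()
            int delay = start + 10 * ((int) Math.exp(0.8d * k) - 1);
            total += delay;
            System.out.println("retry " + (k + 1) + ": wait " + delay + " ms");
        }
        System.out.println("total wait: " + total + " ms");
    }
}

The delays come out as 50, 60, 80, 150, 280, 580, 1250 and 2740 ms, for a total of 5190 ms, which matches the javadoc's claim.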
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.action.bulk;
 
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
@@ -48,7 +47,7 @@ public class BulkProcessor implements Closeable {
     /**
     * A listener for the execution.
     */
-    public static interface Listener {
+    public interface Listener {
 
         /**
         * Callback before the bulk is executed.
@@ -79,6 +78,7 @@ public class BulkProcessor implements Closeable {
         private int bulkActions = 1000;
         private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB);
         private TimeValue flushInterval = null;
+        private BackoffPolicy backoffPolicy = BackoffPolicy.noBackoff();
 
         /**
         * Creates a builder of bulk processor with the client to use and the listener that will be used
@@ -136,11 +136,25 @@ public class BulkProcessor implements Closeable {
             return this;
         }
 
+        /**
+        * Sets a custom backoff policy. The backoff policy defines how the bulk processor should handle retries of bulk requests internally
+        * in case they have failed due to resource constraints (i.e. a thread pool was full).
+        *
+        * The default is to not back off, i.e. failing immediately.
+        */
+        public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) {
+            if (backoffPolicy == null) {
+                throw new NullPointerException("'backoffPolicy' must not be null. To disable backoff, pass BackoffPolicy.noBackoff()");
+            }
+            this.backoffPolicy = backoffPolicy;
+            return this;
+        }
+
         /**
         * Builds a new bulk processor.
         */
         public BulkProcessor build() {
-            return new BulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
+            return new BulkProcessor(client, backoffPolicy, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
         }
     }
 
@@ -152,38 +166,27 @@ public class BulkProcessor implements Closeable {
         return new Builder(client, listener);
     }
 
-    private final Client client;
-    private final Listener listener;
-
-    private final String name;
-
-    private final int concurrentRequests;
     private final int bulkActions;
     private final long bulkSize;
-    private final TimeValue flushInterval;
 
-    private final Semaphore semaphore;
 
     private final ScheduledThreadPoolExecutor scheduler;
     private final ScheduledFuture scheduledFuture;
 
     private final AtomicLong executionIdGen = new AtomicLong();
 
     private BulkRequest bulkRequest;
+    private final BulkRequestHandler bulkRequestHandler;
 
     private volatile boolean closed = false;
 
-    BulkProcessor(Client client, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) {
-        this.client = client;
-        this.listener = listener;
-        this.name = name;
-        this.concurrentRequests = concurrentRequests;
+    BulkProcessor(Client client, BackoffPolicy backoffPolicy, Listener listener, @Nullable String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval) {
         this.bulkActions = bulkActions;
         this.bulkSize = bulkSize.bytes();
 
-        this.semaphore = new Semaphore(concurrentRequests);
         this.bulkRequest = new BulkRequest();
+        this.bulkRequestHandler = (concurrentRequests == 0) ? BulkRequestHandler.syncHandler(client, backoffPolicy, listener) : BulkRequestHandler.asyncHandler(client, backoffPolicy, listener, concurrentRequests);
 
-        this.flushInterval = flushInterval;
         if (flushInterval != null) {
             this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, EsExecutors.daemonThreadFactory(client.settings(), (name != null ? "[" + name + "]" : "") + "bulk_processor"));
             this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
@@ -231,14 +234,7 @@ public class BulkProcessor implements Closeable {
         if (bulkRequest.numberOfActions() > 0) {
             execute();
         }
-        if (this.concurrentRequests < 1) {
-            return true;
-        }
-        if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) {
-            semaphore.release(this.concurrentRequests);
-            return true;
-        }
-        return false;
+        return this.bulkRequestHandler.awaitClose(timeout, unit);
     }
 
     /**
@@ -308,58 +304,7 @@ public class BulkProcessor implements Closeable {
         final long executionId = executionIdGen.incrementAndGet();
 
         this.bulkRequest = new BulkRequest();
-
-        if (concurrentRequests == 0) {
-            // execute in a blocking fashion...
-            boolean afterCalled = false;
-            try {
-                listener.beforeBulk(executionId, bulkRequest);
-                BulkResponse bulkItemResponses = client.bulk(bulkRequest).actionGet();
-                afterCalled = true;
-                listener.afterBulk(executionId, bulkRequest, bulkItemResponses);
-            } catch (Exception e) {
-                if (!afterCalled) {
-                    listener.afterBulk(executionId, bulkRequest, e);
-                }
-            }
-        } else {
-            boolean success = false;
-            boolean acquired = false;
-            try {
-                listener.beforeBulk(executionId, bulkRequest);
-                semaphore.acquire();
-                acquired = true;
-                client.bulk(bulkRequest, new ActionListener<BulkResponse>() {
-                    @Override
-                    public void onResponse(BulkResponse response) {
-                        try {
-                            listener.afterBulk(executionId, bulkRequest, response);
-                        } finally {
-                            semaphore.release();
-                        }
-                    }
-
-                    @Override
-                    public void onFailure(Throwable e) {
-                        try {
-                            listener.afterBulk(executionId, bulkRequest, e);
-                        } finally {
-                            semaphore.release();
-                        }
-                    }
-                });
-                success = true;
-            } catch (InterruptedException e) {
-                Thread.interrupted();
-                listener.afterBulk(executionId, bulkRequest, e);
-            } catch (Throwable t) {
-                listener.afterBulk(executionId, bulkRequest, t);
-            } finally {
-                if (!success && acquired) { // if we fail on client.bulk() release the semaphore
-                    semaphore.release();
-                }
-            }
-        }
+        this.bulkRequestHandler.execute(bulkRequest, executionId);
     }
 
     private boolean isOverTheLimit() {
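A minimal usage sketch of the builder method added above, in plain Java. It assumes an already-constructed org.elasticsearch.client.Client named "client"; the listener bodies and the class name are illustrative only, not part of the commit:

import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;

public class BulkProcessorBackoffDemo {
    public static BulkProcessor build(Client client) {
        return BulkProcessor.builder(client, new BulkProcessor.Listener() {
                @Override
                public void beforeBulk(long executionId, BulkRequest request) { }

                @Override
                public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { }

                @Override
                public void afterBulk(long executionId, BulkRequest request, Throwable failure) { }
            })
            .setConcurrentRequests(1) // > 0 selects the async handler, 0 the sync handler
            // retry rejected bulk requests with the default exponential policy (~5.1 s total)
            .setBackoffPolicy(BackoffPolicy.exponentialBackoff())
            .build();
    }
}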
@@ -0,0 +1,160 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.action.bulk;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

/**
 * Abstracts the low-level details of bulk request handling
 */
abstract class BulkRequestHandler {
    protected final ESLogger logger;
    protected final Client client;

    protected BulkRequestHandler(Client client) {
        this.client = client;
        this.logger = Loggers.getLogger(getClass(), client.settings());
    }


    public abstract void execute(BulkRequest bulkRequest, long executionId);

    public abstract boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException;


    public static BulkRequestHandler syncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) {
        return new SyncBulkRequestHandler(client, backoffPolicy, listener);
    }

    public static BulkRequestHandler asyncHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) {
        return new AsyncBulkRequestHandler(client, backoffPolicy, listener, concurrentRequests);
    }

    private static class SyncBulkRequestHandler extends BulkRequestHandler {
        private final BulkProcessor.Listener listener;
        private final BackoffPolicy backoffPolicy;

        public SyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener) {
            super(client);
            this.backoffPolicy = backoffPolicy;
            this.listener = listener;
        }

        @Override
        public void execute(BulkRequest bulkRequest, long executionId) {
            boolean afterCalled = false;
            try {
                listener.beforeBulk(executionId, bulkRequest);
                BulkResponse bulkResponse = Retry
                        .on(EsRejectedExecutionException.class)
                        .policy(backoffPolicy)
                        .withSyncBackoff(client, bulkRequest);
                afterCalled = true;
                listener.afterBulk(executionId, bulkRequest, bulkResponse);
            } catch (Exception e) {
                if (!afterCalled) {
                    logger.warn("Failed to execute bulk request {}.", e, executionId);
                    listener.afterBulk(executionId, bulkRequest, e);
                }
            }
        }

        @Override
        public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
            // we are "closed" immediately as there is no request in flight
            return true;
        }
    }

    private static class AsyncBulkRequestHandler extends BulkRequestHandler {
        private final BackoffPolicy backoffPolicy;
        private final BulkProcessor.Listener listener;
        private final Semaphore semaphore;
        private final int concurrentRequests;

        private AsyncBulkRequestHandler(Client client, BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, int concurrentRequests) {
            super(client);
            this.backoffPolicy = backoffPolicy;
            assert concurrentRequests > 0;
            this.listener = listener;
            this.concurrentRequests = concurrentRequests;
            this.semaphore = new Semaphore(concurrentRequests);
        }

        @Override
        public void execute(BulkRequest bulkRequest, long executionId) {
            boolean bulkRequestSetupSuccessful = false;
            boolean acquired = false;
            try {
                listener.beforeBulk(executionId, bulkRequest);
                semaphore.acquire();
                acquired = true;
                Retry.on(EsRejectedExecutionException.class)
                        .policy(backoffPolicy)
                        .withAsyncBackoff(client, bulkRequest, new ActionListener<BulkResponse>() {
                            @Override
                            public void onResponse(BulkResponse response) {
                                try {
                                    listener.afterBulk(executionId, bulkRequest, response);
                                } finally {
                                    semaphore.release();
                                }
                            }

                            @Override
                            public void onFailure(Throwable e) {
                                try {
                                    listener.afterBulk(executionId, bulkRequest, e);
                                } finally {
                                    semaphore.release();
                                }
                            }
                        });
                bulkRequestSetupSuccessful = true;
            } catch (InterruptedException e) {
                // This is intentionally wrong to avoid changing the behaviour implicitly with this PR. It will be fixed in #14833
                Thread.interrupted();
                listener.afterBulk(executionId, bulkRequest, e);
            } catch (Throwable t) {
                logger.warn("Failed to execute bulk request {}.", t, executionId);
                listener.afterBulk(executionId, bulkRequest, t);
            } finally {
                if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore
                    semaphore.release();
                }
            }
        }

        @Override
        public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
            if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) {
                semaphore.release(this.concurrentRequests);
                return true;
            }
            return false;
        }
    }
}
core/src/main/java/org/elasticsearch/action/bulk/Retry.java (new file): 237 lines

@@ -0,0 +1,237 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.action.bulk;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.*;
import java.util.function.Predicate;

/**
 * Encapsulates synchronous and asynchronous retry logic.
 */
class Retry {
    private final Class<? extends Throwable> retryOnThrowable;

    private BackoffPolicy backoffPolicy;

    public static Retry on(Class<? extends Throwable> retryOnThrowable) {
        return new Retry(retryOnThrowable);
    }

    /**
     * @param backoffPolicy The backoff policy that defines how long and how often to wait for retries.
     */
    public Retry policy(BackoffPolicy backoffPolicy) {
        this.backoffPolicy = backoffPolicy;
        return this;
    }

    Retry(Class<? extends Throwable> retryOnThrowable) {
        this.retryOnThrowable = retryOnThrowable;
    }

    /**
     * Invokes #bulk(BulkRequest, ActionListener) on the provided client. Backs off on the provided exception and delegates results to the
     * provided listener.
     *
     * @param client      Client invoking the bulk request.
     * @param bulkRequest The bulk request that should be executed.
     * @param listener    A listener that is invoked when the bulk request finishes or completes with an exception.
     */
    public void withAsyncBackoff(Client client, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) {
        AsyncRetryHandler r = new AsyncRetryHandler(retryOnThrowable, backoffPolicy, client, listener);
        r.execute(bulkRequest);
    }

    /**
     * Invokes #bulk(BulkRequest) on the provided client. Backs off on the provided exception.
     *
     * @param client      Client invoking the bulk request.
     * @param bulkRequest The bulk request that should be executed.
     * @return the bulk response as returned by the client.
     * @throws Exception Any exception thrown while executing the bulk request.
     */
    public BulkResponse withSyncBackoff(Client client, BulkRequest bulkRequest) throws Exception {
        return SyncRetryHandler
                .create(retryOnThrowable, backoffPolicy, client)
                .executeBlocking(bulkRequest)
                .actionGet();
    }

    static class AbstractRetryHandler implements ActionListener<BulkResponse> {
        private final ESLogger logger;
        private final Client client;
        private final ActionListener<BulkResponse> listener;
        private final Iterator<TimeValue> backoff;
        private final Class<? extends Throwable> retryOnThrowable;
        // Access only when holding a client-side lock, see also #addResponses()
        private final List<BulkItemResponse> responses = new ArrayList<>();
        private final long startTimestampNanos;
        // needed to construct the next bulk request based on the response to the previous one
        // volatile as we're called from a scheduled thread
        private volatile BulkRequest currentBulkRequest;
        private volatile ScheduledFuture<?> scheduledRequestFuture;

        public AbstractRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
            this.retryOnThrowable = retryOnThrowable;
            this.backoff = backoffPolicy.iterator();
            this.client = client;
            this.listener = listener;
            this.logger = Loggers.getLogger(getClass(), client.settings());
            // in contrast to System.currentTimeMillis(), nanoTime() uses a monotonic clock under the hood
            this.startTimestampNanos = System.nanoTime();
        }

        @Override
        public void onResponse(BulkResponse bulkItemResponses) {
            if (!bulkItemResponses.hasFailures()) {
                // we're done here, include all responses
                addResponses(bulkItemResponses, (r -> true));
                finishHim();
            } else {
                if (canRetry(bulkItemResponses)) {
                    addResponses(bulkItemResponses, (r -> !r.isFailed()));
                    retry(createBulkRequestForRetry(bulkItemResponses));
                } else {
                    addResponses(bulkItemResponses, (r -> true));
                    finishHim();
                }
            }
        }

        @Override
        public void onFailure(Throwable e) {
            try {
                listener.onFailure(e);
            } finally {
                FutureUtils.cancel(scheduledRequestFuture);
            }
        }

        private void retry(BulkRequest bulkRequestForRetry) {
            assert backoff.hasNext();
            TimeValue next = backoff.next();
            logger.trace("Retry of bulk request scheduled in {} ms.", next.millis());
            scheduledRequestFuture = client.threadPool().schedule(next, ThreadPool.Names.SAME, (() -> this.execute(bulkRequestForRetry)));
        }

        private BulkRequest createBulkRequestForRetry(BulkResponse bulkItemResponses) {
            BulkRequest requestToReissue = new BulkRequest();
            int index = 0;
            for (BulkItemResponse bulkItemResponse : bulkItemResponses.getItems()) {
                if (bulkItemResponse.isFailed()) {
                    requestToReissue.add(currentBulkRequest.requests().get(index));
                }
                index++;
            }
            return requestToReissue;
        }

        private boolean canRetry(BulkResponse bulkItemResponses) {
            if (!backoff.hasNext()) {
                return false;
            }
            for (BulkItemResponse bulkItemResponse : bulkItemResponses) {
                if (bulkItemResponse.isFailed()) {
                    Throwable cause = bulkItemResponse.getFailure().getCause();
                    Throwable rootCause = ExceptionsHelper.unwrapCause(cause);
                    if (!rootCause.getClass().equals(retryOnThrowable)) {
                        return false;
                    }
                }
            }
            return true;
        }

        private void finishHim() {
            try {
                listener.onResponse(getAccumulatedResponse());
            } finally {
                FutureUtils.cancel(scheduledRequestFuture);
            }
        }

        private void addResponses(BulkResponse response, Predicate<BulkItemResponse> filter) {
            for (BulkItemResponse bulkItemResponse : response) {
                if (filter.test(bulkItemResponse)) {
                    // Use client-side lock here to avoid visibility issues. This method may be called multiple times
                    // (based on how many retries we have to issue) and relying that the response handling code will be
                    // scheduled on the same thread is fragile.
                    synchronized (responses) {
                        responses.add(bulkItemResponse);
                    }
                }
            }
        }

        private BulkResponse getAccumulatedResponse() {
            BulkItemResponse[] itemResponses;
            synchronized (responses) {
                itemResponses = responses.toArray(new BulkItemResponse[responses.size()]);
            }
            long stopTimestamp = System.nanoTime();
            long totalLatencyMs = TimeValue.timeValueNanos(stopTimestamp - startTimestampNanos).millis();
            return new BulkResponse(itemResponses, totalLatencyMs);
        }

        public void execute(BulkRequest bulkRequest) {
            this.currentBulkRequest = bulkRequest;
            client.bulk(bulkRequest, this);
        }
    }

    static class AsyncRetryHandler extends AbstractRetryHandler {
        public AsyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, ActionListener<BulkResponse> listener) {
            super(retryOnThrowable, backoffPolicy, client, listener);
        }
    }

    static class SyncRetryHandler extends AbstractRetryHandler {
        private final PlainActionFuture<BulkResponse> actionFuture;

        public static SyncRetryHandler create(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client) {
            PlainActionFuture<BulkResponse> actionFuture = PlainActionFuture.newFuture();
            return new SyncRetryHandler(retryOnThrowable, backoffPolicy, client, actionFuture);
        }

        public SyncRetryHandler(Class<? extends Throwable> retryOnThrowable, BackoffPolicy backoffPolicy, Client client, PlainActionFuture<BulkResponse> actionFuture) {
            super(retryOnThrowable, backoffPolicy, client, actionFuture);
            this.actionFuture = actionFuture;
        }

        public ActionFuture<BulkResponse> executeBlocking(BulkRequest bulkRequest) {
            super.execute(bulkRequest);
            return actionFuture;
        }
    }
}
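A sketch of the fluent Retry API, as SyncBulkRequestHandler exercises it above. Retry is package-private, so the (hypothetical) helper below would have to live in org.elasticsearch.action.bulk; "client" and "bulkRequest" are assumed to be provided by the caller:

package org.elasticsearch.action.bulk;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

class RetryDemo {
    static BulkResponse bulkWithRetry(Client client, BulkRequest bulkRequest) throws Exception {
        return Retry
                .on(EsRejectedExecutionException.class) // only this failure type is retried
                .policy(BackoffPolicy.constantBackoff(TimeValue.timeValueMillis(100), 3))
                .withSyncBackoff(client, bulkRequest);  // blocks until the request and any retries complete
    }
}

The asynchronous variant, withAsyncBackoff, takes the same client and request plus an ActionListener&lt;BulkResponse&gt; and returns immediately.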
@@ -50,6 +50,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext;
 import org.elasticsearch.search.lookup.SourceLookup;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -245,7 +246,7 @@ public class UpdateHelper extends AbstractComponent {
     private Map<String, Object> executeScript(UpdateRequest request, Map<String, Object> ctx) {
         try {
             if (scriptService != null) {
-                ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request);
+                ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request, Collections.emptyMap());
                 script.setNextVar("ctx", ctx);
                 script.run();
                 // we need to unwrap the ctx...
@@ -1,37 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.transport;

import org.elasticsearch.client.support.Headers;
import org.elasticsearch.client.transport.support.TransportProxyClient;
import org.elasticsearch.common.inject.AbstractModule;

/**
 *
 */
public class ClientTransportModule extends AbstractModule {

    @Override
    protected void configure() {
        bind(Headers.class).asEagerSingleton();
        bind(TransportProxyClient.class).asEagerSingleton();
        bind(TransportClientNodesService.class).asEagerSingleton();
    }
}
@@ -51,7 +51,6 @@ import org.elasticsearch.plugins.PluginsService;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.threadpool.ThreadPoolModule;
-import org.elasticsearch.transport.TransportModule;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.netty.NettyTransport;
 
@@ -65,7 +64,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 * The transport client allows to create a client that is not part of the cluster, but simply connects to one
 * or more nodes directly by adding their respective addresses using {@link #addTransportAddress(org.elasticsearch.common.transport.TransportAddress)}.
 * <p>
-* The transport client important modules used is the {@link org.elasticsearch.transport.TransportModule} which is
+* The transport client important modules used is the {@link org.elasticsearch.common.network.NetworkModule} which is
 * started in client mode (only connects, no bind).
 */
public class TransportClient extends AbstractClient {
@@ -139,10 +138,9 @@ public class TransportClient extends AbstractClient {
         }
         modules.add(new PluginsModule(pluginsService));
         modules.add(new SettingsModule(this.settings, settingsFilter ));
-        modules.add(new NetworkModule(networkService));
+        modules.add(new NetworkModule(networkService, this.settings, true));
         modules.add(new ClusterNameModule(this.settings));
         modules.add(new ThreadPoolModule(threadPool));
-        modules.add(new TransportModule(this.settings));
         modules.add(new SearchModule() {
             @Override
             protected void configure() {
@@ -150,7 +148,6 @@ public class TransportClient extends AbstractClient {
             }
         });
         modules.add(new ActionModule(true));
-        modules.add(new ClientTransportModule());
         modules.add(new CircuitBreakerModule(this.settings));
 
         pluginsService.processModules(modules);
@@ -37,6 +37,13 @@ public interface ClusterStateTaskExecutor<T> {
         return true;
     }
 
+    /**
+     * Callback invoked after new cluster state is published. Note that
+     * this method is not invoked if the cluster state was not updated.
+     */
+    default void clusterStatePublished(ClusterState newClusterState) {
+    }
+
     /**
     * Represents the result of a batched execution of cluster state update tasks
    * @param <T> the type of the cluster state update task
@@ -148,20 +148,9 @@ public class ShardStateAction extends AbstractComponent {
             @Override
             public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                 try {
-                    int numberOfUnassignedShards = newState.getRoutingNodes().unassigned().size();
-                    if (oldState != newState && numberOfUnassignedShards > 0) {
-                        String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shard [%s]", numberOfUnassignedShards, request.shardRouting);
-                        if (logger.isTraceEnabled()) {
-                            logger.trace(reason + ", scheduling a reroute");
-                        }
-                        routingService.reroute(reason);
-                    }
-                } finally {
-                    try {
-                        channel.sendResponse(TransportResponse.Empty.INSTANCE);
-                    } catch (Throwable channelThrowable) {
-                        logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting);
-                    }
+                    channel.sendResponse(TransportResponse.Empty.INSTANCE);
+                } catch (Throwable channelThrowable) {
+                    logger.warn("failed to send response while failing shard [{}]", channelThrowable, request.shardRouting);
                 }
             }
         }
@@ -189,6 +178,18 @@ public class ShardStateAction extends AbstractComponent {
             }
             return batchResultBuilder.build(maybeUpdatedState);
         }
+
+        @Override
+        public void clusterStatePublished(ClusterState newClusterState) {
+            int numberOfUnassignedShards = newClusterState.getRoutingNodes().unassigned().size();
+            if (numberOfUnassignedShards > 0) {
+                String reason = String.format(Locale.ROOT, "[%d] unassigned shards after failing shards", numberOfUnassignedShards);
+                if (logger.isTraceEnabled()) {
+                    logger.trace(reason + ", scheduling a reroute");
+                }
+                routingService.reroute(reason);
+            }
+        }
     }
 
     private final ShardFailedClusterStateHandler shardFailedClusterStateHandler = new ShardFailedClusterStateHandler();
@@ -614,6 +614,8 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
             task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState);
         }
 
+        executor.clusterStatePublished(newClusterState);
+
         TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
         logger.debug("processing [{}]: took {} done applying updated cluster_state (version: {}, uuid: {})", source, executionTime, newClusterState.version(), newClusterState.stateUUID());
         warnAboutSlowTaskIfNeeded(executionTime, source);
@@ -19,21 +19,362 @@

package org.elasticsearch.common.network;

import org.elasticsearch.client.support.Headers;
import org.elasticsearch.client.transport.TransportClientNodesService;
import org.elasticsearch.client.transport.support.TransportProxyClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.netty.NettyHttpServerTransport;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction;
import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction;
import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction;
import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction;
import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction;
import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction;
import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction;
import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction;
import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction;
import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction;
import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction;
import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction;
import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction;
import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction;
import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction;
import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction;
import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction;
import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction;
import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction;
import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction;
import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction;
import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction;
import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction;
import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction;
import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction;
import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction;
import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction;
import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction;
import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction;
import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction;
import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction;
import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction;
import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction;
import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction;
import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction;
import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction;
import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction;
import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction;
import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction;
import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction;
import org.elasticsearch.rest.action.bulk.RestBulkAction;
import org.elasticsearch.rest.action.cat.AbstractCatAction;
import org.elasticsearch.rest.action.cat.RestAliasAction;
import org.elasticsearch.rest.action.cat.RestAllocationAction;
import org.elasticsearch.rest.action.cat.RestCatAction;
import org.elasticsearch.rest.action.cat.RestFielddataAction;
import org.elasticsearch.rest.action.cat.RestHealthAction;
import org.elasticsearch.rest.action.cat.RestIndicesAction;
import org.elasticsearch.rest.action.cat.RestMasterAction;
import org.elasticsearch.rest.action.cat.RestNodeAttrsAction;
import org.elasticsearch.rest.action.cat.RestNodesAction;
import org.elasticsearch.rest.action.cat.RestPluginsAction;
import org.elasticsearch.rest.action.cat.RestRepositoriesAction;
import org.elasticsearch.rest.action.cat.RestSegmentsAction;
import org.elasticsearch.rest.action.cat.RestShardsAction;
import org.elasticsearch.rest.action.cat.RestSnapshotAction;
import org.elasticsearch.rest.action.cat.RestThreadPoolAction;
import org.elasticsearch.rest.action.delete.RestDeleteAction;
import org.elasticsearch.rest.action.explain.RestExplainAction;
import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction;
import org.elasticsearch.rest.action.get.RestGetAction;
import org.elasticsearch.rest.action.get.RestGetSourceAction;
import org.elasticsearch.rest.action.get.RestHeadAction;
import org.elasticsearch.rest.action.get.RestMultiGetAction;
import org.elasticsearch.rest.action.index.RestIndexAction;
import org.elasticsearch.rest.action.main.RestMainAction;
import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction;
import org.elasticsearch.rest.action.percolate.RestPercolateAction;
import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction;
import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction;
import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction;
import org.elasticsearch.rest.action.search.RestClearScrollAction;
|
||||
import org.elasticsearch.rest.action.search.RestMultiSearchAction;
|
||||
import org.elasticsearch.rest.action.search.RestSearchAction;
|
||||
import org.elasticsearch.rest.action.search.RestSearchScrollAction;
|
||||
import org.elasticsearch.rest.action.suggest.RestSuggestAction;
|
||||
import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction;
|
||||
import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction;
|
||||
import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction;
|
||||
import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction;
|
||||
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
|
||||
import org.elasticsearch.rest.action.update.RestUpdateAction;
|
||||
import org.elasticsearch.transport.Transport;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.transport.local.LocalTransport;
|
||||
import org.elasticsearch.transport.netty.NettyTransport;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
 *
 * A module to handle registering and binding all network related classes.
 */
public class NetworkModule extends AbstractModule {

    private final NetworkService networkService;
    public static final String TRANSPORT_TYPE_KEY = "transport.type";
    public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type";

    public NetworkModule(NetworkService networkService) {
    public static final String LOCAL_TRANSPORT = "local";
    public static final String NETTY_TRANSPORT = "netty";

    public static final String HTTP_TYPE_KEY = "http.type";
    public static final String HTTP_ENABLED = "http.enabled";

    private static final List<Class<? extends RestHandler>> builtinRestHandlers = Arrays.asList(
        RestMainAction.class,

        RestNodesInfoAction.class,
        RestNodesStatsAction.class,
        RestNodesHotThreadsAction.class,
        RestClusterStatsAction.class,
        RestClusterStateAction.class,
        RestClusterHealthAction.class,
        RestClusterUpdateSettingsAction.class,
        RestClusterGetSettingsAction.class,
        RestClusterRerouteAction.class,
        RestClusterSearchShardsAction.class,
        RestPendingClusterTasksAction.class,
        RestPutRepositoryAction.class,
        RestGetRepositoriesAction.class,
        RestDeleteRepositoryAction.class,
        RestVerifyRepositoryAction.class,
        RestGetSnapshotsAction.class,
        RestCreateSnapshotAction.class,
        RestRestoreSnapshotAction.class,
        RestDeleteSnapshotAction.class,
        RestSnapshotsStatusAction.class,

        RestIndicesExistsAction.class,
        RestTypesExistsAction.class,
        RestGetIndicesAction.class,
        RestIndicesStatsAction.class,
        RestIndicesSegmentsAction.class,
        RestIndicesShardStoresAction.class,
        RestGetAliasesAction.class,
        RestAliasesExistAction.class,
        RestIndexDeleteAliasesAction.class,
        RestIndexPutAliasAction.class,
        RestIndicesAliasesAction.class,
        RestGetIndicesAliasesAction.class,
        RestCreateIndexAction.class,
        RestDeleteIndexAction.class,
        RestCloseIndexAction.class,
        RestOpenIndexAction.class,

        RestUpdateSettingsAction.class,
        RestGetSettingsAction.class,

        RestAnalyzeAction.class,
        RestGetIndexTemplateAction.class,
        RestPutIndexTemplateAction.class,
        RestDeleteIndexTemplateAction.class,
        RestHeadIndexTemplateAction.class,

        RestPutWarmerAction.class,
        RestDeleteWarmerAction.class,
        RestGetWarmerAction.class,

        RestPutMappingAction.class,
        RestGetMappingAction.class,
        RestGetFieldMappingAction.class,

        RestRefreshAction.class,
        RestFlushAction.class,
        RestSyncedFlushAction.class,
        RestForceMergeAction.class,
        RestUpgradeAction.class,
        RestClearIndicesCacheAction.class,

        RestIndexAction.class,
        RestGetAction.class,
        RestGetSourceAction.class,
        RestHeadAction.class,
        RestMultiGetAction.class,
        RestDeleteAction.class,
        org.elasticsearch.rest.action.count.RestCountAction.class,
        RestSuggestAction.class,
        RestTermVectorsAction.class,
        RestMultiTermVectorsAction.class,
        RestBulkAction.class,
        RestUpdateAction.class,
        RestPercolateAction.class,
        RestMultiPercolateAction.class,

        RestSearchAction.class,
        RestSearchScrollAction.class,
        RestClearScrollAction.class,
        RestMultiSearchAction.class,
        RestRenderSearchTemplateAction.class,

        RestValidateQueryAction.class,

        RestExplainAction.class,

        RestRecoveryAction.class,

        // Templates API
        RestGetSearchTemplateAction.class,
        RestPutSearchTemplateAction.class,
        RestDeleteSearchTemplateAction.class,

        // Scripts API
        RestGetIndexedScriptAction.class,
        RestPutIndexedScriptAction.class,
        RestDeleteIndexedScriptAction.class,

        RestFieldStatsAction.class,

        // no abstract cat action
        RestCatAction.class
    );

    private static final List<Class<? extends AbstractCatAction>> builtinCatHandlers = Arrays.asList(
        RestAllocationAction.class,
        RestShardsAction.class,
        RestMasterAction.class,
        RestNodesAction.class,
        RestIndicesAction.class,
        RestSegmentsAction.class,
        // Fully qualified to prevent interference with rest.action.count.RestCountAction
        org.elasticsearch.rest.action.cat.RestCountAction.class,
        // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction
        org.elasticsearch.rest.action.cat.RestRecoveryAction.class,
        RestHealthAction.class,
        org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class,
        RestAliasAction.class,
        RestThreadPoolAction.class,
        RestPluginsAction.class,
        RestFielddataAction.class,
        RestNodeAttrsAction.class,
        RestRepositoriesAction.class,
        RestSnapshotAction.class
    );

    private final NetworkService networkService;
    private final Settings settings;
    private final boolean transportClient;

    private final ExtensionPoint.SelectedType<TransportService> transportServiceTypes = new ExtensionPoint.SelectedType<>("transport_service", TransportService.class);
    private final ExtensionPoint.SelectedType<Transport> transportTypes = new ExtensionPoint.SelectedType<>("transport", Transport.class);
    private final ExtensionPoint.SelectedType<HttpServerTransport> httpTransportTypes = new ExtensionPoint.SelectedType<>("http_transport", HttpServerTransport.class);
    private final ExtensionPoint.ClassSet<RestHandler> restHandlers = new ExtensionPoint.ClassSet<>("rest_handler", RestHandler.class);
    // we must separate the cat rest handlers so RestCatAction can collect them...
    private final ExtensionPoint.ClassSet<AbstractCatAction> catHandlers = new ExtensionPoint.ClassSet<>("cat_handler", AbstractCatAction.class);

    /**
     * Creates a network module that custom networking classes can be plugged into.
     *
     * @param networkService A constructed network service object to bind.
     * @param settings The settings for the node
     * @param transportClient True if only transport classes should be allowed to be registered, false otherwise.
     */
    public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient) {
        this.networkService = networkService;
        this.settings = settings;
        this.transportClient = transportClient;
        registerTransportService(NETTY_TRANSPORT, TransportService.class);
        registerTransport(LOCAL_TRANSPORT, LocalTransport.class);
        registerTransport(NETTY_TRANSPORT, NettyTransport.class);

        if (transportClient == false) {
            registerHttpTransport(NETTY_TRANSPORT, NettyHttpServerTransport.class);

            for (Class<? extends AbstractCatAction> catAction : builtinCatHandlers) {
                catHandlers.registerExtension(catAction);
            }
            for (Class<? extends RestHandler> restAction : builtinRestHandlers) {
                restHandlers.registerExtension(restAction);
            }
        }
    }

    /** Adds a transport service implementation that can be selected by setting {@link #TRANSPORT_SERVICE_TYPE_KEY}. */
    public void registerTransportService(String name, Class<? extends TransportService> clazz) {
        transportServiceTypes.registerExtension(name, clazz);
    }

    /** Adds a transport implementation that can be selected by setting {@link #TRANSPORT_TYPE_KEY}. */
    public void registerTransport(String name, Class<? extends Transport> clazz) {
        transportTypes.registerExtension(name, clazz);
    }

    /** Adds an http transport implementation that can be selected by setting {@link #HTTP_TYPE_KEY}. */
    // TODO: we need another name than "http transport"....so confusing with transportClient...
    public void registerHttpTransport(String name, Class<? extends HttpServerTransport> clazz) {
        if (transportClient) {
            throw new IllegalArgumentException("Cannot register http transport " + clazz.getName() + " for transport client");
        }
        httpTransportTypes.registerExtension(name, clazz);
    }

    /** Adds an additional rest action. */
    // TODO: change this further to eliminate the middle man, ie RestController, and just register method and path here
    public void registerRestHandler(Class<? extends RestHandler> clazz) {
        if (transportClient) {
            throw new IllegalArgumentException("Cannot register rest handler " + clazz.getName() + " for transport client");
        }
        if (AbstractCatAction.class.isAssignableFrom(clazz)) {
            catHandlers.registerExtension(clazz.asSubclass(AbstractCatAction.class));
        } else {
            restHandlers.registerExtension(clazz);
        }
    }

    @Override
    protected void configure() {
        bind(NetworkService.class).toInstance(networkService);
        bind(NamedWriteableRegistry.class).asEagerSingleton();

        transportServiceTypes.bindType(binder(), settings, TRANSPORT_SERVICE_TYPE_KEY, NETTY_TRANSPORT);
        String defaultTransport = DiscoveryNode.localNode(settings) ? LOCAL_TRANSPORT : NETTY_TRANSPORT;
        transportTypes.bindType(binder(), settings, TRANSPORT_TYPE_KEY, defaultTransport);

        if (transportClient) {
            bind(Headers.class).asEagerSingleton();
            bind(TransportProxyClient.class).asEagerSingleton();
            bind(TransportClientNodesService.class).asEagerSingleton();
        } else {
            if (settings.getAsBoolean(HTTP_ENABLED, true)) {
                bind(HttpServer.class).asEagerSingleton();
                httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT);
            }
            bind(RestController.class).asEagerSingleton();
            catHandlers.bind(binder());
            restHandlers.bind(binder());
        }
    }
}

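For orientation, a minimal sketch of how a plugin is expected to consume these extension points through the usual onModule callback; the plugin, transport, and handler names below are hypothetical and are not part of this change:

    // Hypothetical plugin wiring against the NetworkModule extension points.
    public class MyNetworkPlugin extends Plugin {
        public void onModule(NetworkModule module) {
            // selectable at startup with: transport.type: my-transport
            module.registerTransport("my-transport", MyTransport.class);
            // rejected with IllegalArgumentException on transport clients
            module.registerRestHandler(MyRestHandler.class);
        }
    }

Registering implementations under a name and selecting them through a setting replaces the per-module setters that the HttpServerModule and RestModule deletions further down used to expose.
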
@ -21,7 +21,12 @@ package org.elasticsearch.env;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -31,6 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
@ -38,11 +44,25 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.FsDirectoryService;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.fs.FsProbe;
import org.elasticsearch.monitor.jvm.JvmInfo;

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.*;
import java.util.*;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@ -145,7 +165,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
        for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
            Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId));
            Files.createDirectories(dir);

            try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) {
                logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath());
                try {
@ -187,6 +207,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
        }

        maybeLogPathDetails();
        maybeLogHeapDetails();

        if (settings.getAsBoolean(SETTING_ENABLE_LUCENE_SEGMENT_INFOS_TRACE, false)) {
            SegmentInfos.setInfoStream(System.out);
@ -274,6 +295,13 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
        }
    }

    private void maybeLogHeapDetails() {
        ByteSizeValue maxHeapSize = JvmInfo.jvmInfo().getMem().getHeapMax();
        Boolean usingCompressedOops = JvmInfo.jvmInfo().usingCompressedOops();
        String usingCompressedOopsStatus = usingCompressedOops == null ? "unknown" : Boolean.toString(usingCompressedOops);
        logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, usingCompressedOopsStatus);
    }

    private static String toString(Collection<String> items) {
        StringBuilder b = new StringBuilder();
        for(String item : items) {
@ -811,7 +839,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
        // Sanity check:
        assert Integer.parseInt(shardPath.getName(count-1).toString()) >= 0;
        assert "indices".equals(shardPath.getName(count-3).toString());

        return shardPath.getParent().getParent().getParent();
    }
}

@ -1,59 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.http;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.netty.NettyHttpServerTransport;

import java.util.Objects;

/**
 *
 */
public class HttpServerModule extends AbstractModule {

    private final Settings settings;
    private final ESLogger logger;

    private Class<? extends HttpServerTransport> httpServerTransportClass;

    public HttpServerModule(Settings settings) {
        this.settings = settings;
        this.logger = Loggers.getLogger(getClass(), settings);
        this.httpServerTransportClass = NettyHttpServerTransport.class;
    }

    @SuppressWarnings({"unchecked"})
    @Override
    protected void configure() {
        bind(HttpServerTransport.class).to(httpServerTransportClass).asEagerSingleton();
        bind(HttpServer.class).asEagerSingleton();
    }

    public void setHttpServerTransport(Class<? extends HttpServerTransport> httpServerTransport, String source) {
        Objects.requireNonNull(httpServerTransport, "Configured http server transport may not be null");
        Objects.requireNonNull(source, "Plugin, that changes transport may not be null");
        logger.info("Using [{}] as http transport, overridden by [{}]", httpServerTransportClass.getName(), source);
        this.httpServerTransportClass = httpServerTransport;
    }
}

@ -64,10 +64,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        protected final MultiFields.Builder multiFieldsBuilder;
        protected CopyTo copyTo;

        protected Builder(String name, MappedFieldType fieldType) {
        protected Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) {
            super(name);
            this.fieldType = fieldType.clone();
            this.defaultFieldType = fieldType.clone();
            this.defaultFieldType = defaultFieldType.clone();
            this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable
            multiFieldsBuilder = new MultiFields.Builder();
        }

@ -51,8 +51,8 @@ public abstract class MetadataFieldMapper extends FieldMapper {
    }

    public abstract static class Builder<T extends Builder, Y extends MetadataFieldMapper> extends FieldMapper.Builder<T, Y> {
        public Builder(String name, MappedFieldType fieldType) {
            super(name, fieldType);
        public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) {
            super(name, fieldType, defaultFieldType);
        }
    }

@ -72,7 +72,7 @@ public class BinaryFieldMapper extends FieldMapper {
    public static class Builder extends FieldMapper.Builder<Builder, BinaryFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

@ -72,7 +72,7 @@ public class BooleanFieldMapper extends FieldMapper {
    public static class Builder extends FieldMapper.Builder<Builder, BooleanFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            this.builder = this;
        }

@ -356,7 +356,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
         * @param name of the completion field to build
         */
        public Builder(String name) {
            super(name, new CompletionFieldType());
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

@ -66,7 +66,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
        private Boolean coerce;

        public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) {
            super(name, fieldType);
            super(name, fieldType, fieldType);
            this.fieldType.setNumericPrecisionStep(defaultPrecisionStep);
        }

@ -98,7 +98,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
        protected int ignoreAbove = Defaults.IGNORE_ABOVE;

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

@ -94,7 +94,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
        protected Boolean ignoreMalformed;

        public Builder(String name, GeoPointFieldType fieldType) {
            super(name, fieldType);
            super(name, fieldType, fieldType);
        }

        @Override

@ -120,7 +120,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
        private Boolean coerce;

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        @Override

@ -101,7 +101,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
        private EnabledAttributeMapper enabled = Defaults.ENABLED;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            builder = this;
            indexName = Defaults.INDEX_NAME;
        }
@ -78,7 +78,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
        private boolean enabled = Defaults.ENABLED;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

@ -89,7 +89,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
        private String path = Defaults.PATH;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

@ -79,7 +79,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

@ -97,7 +97,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
        private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();

        public Builder(String documentType) {
            super(Defaults.NAME, Defaults.FIELD_TYPE);
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            this.indexName = name;
            this.documentType = documentType;
            builder = this;
@ -77,7 +77,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
        private String path = Defaults.PATH;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
        }

        public Builder required(boolean required) {
@ -88,7 +88,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
        private String[] excludes = null;

        public Builder() {
            super(Defaults.NAME, Defaults.FIELD_TYPE);
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        public Builder enabled(boolean enabled) {
@ -78,7 +78,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
        private long defaultTTL = Defaults.DEFAULT;

        public Builder() {
            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE);
            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        public Builder enabled(EnabledAttributeMapper enabled) {
@ -96,7 +96,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
        private Boolean ignoreMissing = null;

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            if (existing != null) {
                // if there is an existing type, always use that store value (only matters for < 2.0)
                explicitStore = true;
@ -80,7 +80,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
    public static class Builder extends MetadataFieldMapper.Builder<Builder, TypeFieldMapper> {

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

@ -78,7 +78,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
    public static class Builder extends MetadataFieldMapper.Builder<Builder, UidFieldMapper> {

        public Builder(MappedFieldType existing) {
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
            indexName = Defaults.NAME;
        }

@ -61,7 +61,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
    public static class Builder extends MetadataFieldMapper.Builder<Builder, VersionFieldMapper> {

        public Builder() {
            super(Defaults.NAME, Defaults.FIELD_TYPE);
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        @Override

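To make the shape of this refactoring concrete, a sketch of a hypothetical custom mapper builder (not code from this commit): every Builder now hands an explicit default field type up the chain instead of the base class deriving it by cloning the live, mutable field type.

    // Hypothetical custom mapper: the second argument is the field type being
    // configured, the third is the immutable per-type default kept for later
    // comparison; metadata mappers pick Defaults.FIELD_TYPE when no type exists.
    public static class Builder extends FieldMapper.Builder<Builder, MyFieldMapper> {
        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }
    }
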
@ -63,6 +63,7 @@ import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -364,7 +365,7 @@ public class QueryShardContext {
     * Executes the given template, and returns the response.
     */
    public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) {
        ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext);
        ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext, Collections.emptyMap());
        return (BytesReference) executable.run();
    }

@ -33,6 +33,7 @@ import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Collections;
import java.util.Objects;

public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder> {
@ -80,7 +81,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>

        public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) {
            this.script = script;
            this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH);
            this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
        }

        @Override
@ -161,4 +162,4 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
    protected boolean doEquals(ScriptQueryBuilder other) {
        return Objects.equals(script, other.script);
    }
}
}

@ -33,6 +33,7 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;

import java.io.IOException;
import java.util.Collections;
import java.util.Objects;

/**
@ -89,10 +90,10 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
    @Override
    protected ScoreFunction doToFunction(QueryShardContext context) {
        try {
            SearchScript searchScript = context.getScriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
            SearchScript searchScript = context.getScriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
            return new ScriptScoreFunction(script, searchScript);
        } catch (Exception e) {
            throw new QueryShardException(context, "script_score: the script could not be loaded", e);
        }
    }
}
}

@ -29,7 +29,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.lang.management.*;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.ManagementPermission;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.PlatformManagedObject;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -101,6 +108,21 @@ public class JvmInfo implements Streamable, ToXContent {
            info.memoryPools[i] = memoryPoolMXBean.getName();
        }

        try {
            @SuppressWarnings("unchecked") Class<? extends PlatformManagedObject> clazz =
                (Class<? extends PlatformManagedObject>)Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
            Class<?> vmOptionClazz = Class.forName("com.sun.management.VMOption");
            PlatformManagedObject hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz);
            Method vmOptionMethod = clazz.getMethod("getVMOption", String.class);
            Object useCompressedOopsVmOption = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseCompressedOops");
            Method valueMethod = vmOptionClazz.getMethod("getValue");
            String value = (String)valueMethod.invoke(useCompressedOopsVmOption);
            info.usingCompressedOops = Boolean.parseBoolean(value);
        } catch (Throwable t) {
            // unable to deduce the state of compressed oops
            // usingCompressedOops will hold its default value of null
        }

        INSTANCE = info;
    }

@ -135,6 +157,8 @@ public class JvmInfo implements Streamable, ToXContent {
    String[] gcCollectors = Strings.EMPTY_ARRAY;
    String[] memoryPools = Strings.EMPTY_ARRAY;

    private Boolean usingCompressedOops;

    private JvmInfo() {
    }

@ -258,6 +282,10 @@ public class JvmInfo implements Streamable, ToXContent {
        return this.systemProperties;
    }

    public Boolean usingCompressedOops() {
        return this.usingCompressedOops;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.JVM);
@ -279,6 +307,8 @@ public class JvmInfo implements Streamable, ToXContent {
        builder.field(Fields.GC_COLLECTORS, gcCollectors);
        builder.field(Fields.MEMORY_POOLS, memoryPools);

        builder.field(Fields.USING_COMPRESSED_OOPS, usingCompressedOops == null ? "unknown" : Boolean.toString(usingCompressedOops));

        builder.endObject();
        return builder;
    }
@ -306,6 +336,7 @@ public class JvmInfo implements Streamable, ToXContent {
        static final XContentBuilderString DIRECT_MAX_IN_BYTES = new XContentBuilderString("direct_max_in_bytes");
        static final XContentBuilderString GC_COLLECTORS = new XContentBuilderString("gc_collectors");
        static final XContentBuilderString MEMORY_POOLS = new XContentBuilderString("memory_pools");
        static final XContentBuilderString USING_COMPRESSED_OOPS = new XContentBuilderString("using_compressed_ordinary_object_pointers");
    }

    public static JvmInfo readJvmInfo(StreamInput in) throws IOException {
@ -337,6 +368,11 @@ public class JvmInfo implements Streamable, ToXContent {
        mem.readFrom(in);
        gcCollectors = in.readStringArray();
        memoryPools = in.readStringArray();
        if (in.readBoolean()) {
            usingCompressedOops = in.readBoolean();
        } else {
            usingCompressedOops = null;
        }
    }

    @Override
@ -361,6 +397,12 @@ public class JvmInfo implements Streamable, ToXContent {
        mem.writeTo(out);
        out.writeStringArray(gcCollectors);
        out.writeStringArray(memoryPools);
        if (usingCompressedOops != null) {
            out.writeBoolean(true);
            out.writeBoolean(usingCompressedOops);
        } else {
            out.writeBoolean(false);
        }
    }

    public static class Mem implements Streamable {

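As a standalone illustration (a sketch, not code from this commit; the class name is hypothetical), the same reflective probe can be run outside the node, which also shows why the result is a three-valued Boolean: on non-HotSpot JVMs the lookup fails and the value stays null, reported as "unknown".

    import java.lang.management.ManagementFactory;
    import java.lang.management.PlatformManagedObject;

    public class CompressedOopsProbe {
        public static void main(String[] args) {
            Boolean usingCompressedOops = null;  // null means "unknown"
            try {
                @SuppressWarnings("unchecked")
                Class<? extends PlatformManagedObject> clazz = (Class<? extends PlatformManagedObject>)
                        Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
                PlatformManagedObject bean = ManagementFactory.getPlatformMXBean(clazz);
                Object vmOption = clazz.getMethod("getVMOption", String.class).invoke(bean, "UseCompressedOops");
                Object value = Class.forName("com.sun.management.VMOption").getMethod("getValue").invoke(vmOption);
                usingCompressedOops = Boolean.parseBoolean((String) value);
            } catch (Throwable t) {
                // not a HotSpot JVM, or the option is inaccessible; stay "unknown"
            }
            System.out.println("compressed oops: " + (usingCompressedOops == null ? "unknown" : usingCompressedOops));
        }
    }
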
@ -32,7 +32,6 @@ import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Injector;
@ -62,7 +61,6 @@ import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.gateway.GatewayModule;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerModule;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService;
@ -85,7 +83,6 @@ import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
@ -94,7 +91,6 @@ import org.elasticsearch.snapshots.SnapshotShardsService;
import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.tribe.TribeModule;
import org.elasticsearch.tribe.TribeService;
@ -107,7 +103,6 @@ import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
@ -183,20 +178,15 @@ public class Node implements Releasable {
        }
        modules.add(new PluginsModule(pluginsService));
        modules.add(new SettingsModule(this.settings, settingsFilter));
        modules.add(new NodeModule(this,monitorService));
        modules.add(new NetworkModule(networkService));
        modules.add(new ScriptModule(this.settings));
        modules.add(new EnvironmentModule(environment));
        modules.add(new NodeModule(this, monitorService));
        modules.add(new NetworkModule(networkService, settings, false));
        modules.add(new ScriptModule(this.settings));
        modules.add(new NodeEnvironmentModule(nodeEnvironment));
        modules.add(new ClusterNameModule(this.settings));
        modules.add(new ThreadPoolModule(threadPool));
        modules.add(new DiscoveryModule(this.settings));
        modules.add(new ClusterModule(this.settings));
        modules.add(new RestModule(this.settings));
        modules.add(new TransportModule(settings));
        if (settings.getAsBoolean(HTTP_ENABLED, true)) {
            modules.add(new HttpServerModule(settings));
        }
        modules.add(new IndicesModule());
        modules.add(new SearchModule());
        modules.add(new ActionModule(false));

@ -1,51 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.rest;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.action.RestActionModule;

import java.util.ArrayList;
import java.util.List;

/**
 *
 */
public class RestModule extends AbstractModule {

    private final Settings settings;
    private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>();

    public void addRestAction(Class<? extends BaseRestHandler> restAction) {
        restPluginsActions.add(restAction);
    }

    public RestModule(Settings settings) {
        this.settings = settings;
    }


    @Override
    protected void configure() {
        bind(RestController.class).asEagerSingleton();
        new RestActionModule(restPluginsActions).configure(binder());
    }
}

@ -1,273 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.rest.action;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthAction;
import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction;
import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction;
import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.put.RestPutRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.repositories.verify.RestVerifyRepositoryAction;
import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction;
import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterGetSettingsAction;
import org.elasticsearch.rest.action.admin.cluster.settings.RestClusterUpdateSettingsAction;
import org.elasticsearch.rest.action.admin.cluster.shards.RestClusterSearchShardsAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.create.RestCreateSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.delete.RestDeleteSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.get.RestGetSnapshotsAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestoreSnapshotAction;
import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction;
import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction;
import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction;
import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction;
import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.get.RestGetIndicesAliasesAction;
import org.elasticsearch.rest.action.admin.indices.alias.head.RestAliasesExistAction;
import org.elasticsearch.rest.action.admin.indices.alias.put.RestIndexPutAliasAction;
import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction;
import org.elasticsearch.rest.action.admin.indices.cache.clear.RestClearIndicesCacheAction;
import org.elasticsearch.rest.action.admin.indices.close.RestCloseIndexAction;
import org.elasticsearch.rest.action.admin.indices.create.RestCreateIndexAction;
import org.elasticsearch.rest.action.admin.indices.delete.RestDeleteIndexAction;
import org.elasticsearch.rest.action.admin.indices.exists.indices.RestIndicesExistsAction;
import org.elasticsearch.rest.action.admin.indices.exists.types.RestTypesExistsAction;
import org.elasticsearch.rest.action.admin.indices.flush.RestFlushAction;
import org.elasticsearch.rest.action.admin.indices.flush.RestSyncedFlushAction;
import org.elasticsearch.rest.action.admin.indices.forcemerge.RestForceMergeAction;
import org.elasticsearch.rest.action.admin.indices.get.RestGetIndicesAction;
import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetFieldMappingAction;
import org.elasticsearch.rest.action.admin.indices.mapping.get.RestGetMappingAction;
import org.elasticsearch.rest.action.admin.indices.mapping.put.RestPutMappingAction;
import org.elasticsearch.rest.action.admin.indices.open.RestOpenIndexAction;
import org.elasticsearch.rest.action.admin.indices.recovery.RestRecoveryAction;
import org.elasticsearch.rest.action.admin.indices.refresh.RestRefreshAction;
import org.elasticsearch.rest.action.admin.indices.segments.RestIndicesSegmentsAction;
import org.elasticsearch.rest.action.admin.indices.settings.RestGetSettingsAction;
import org.elasticsearch.rest.action.admin.indices.settings.RestUpdateSettingsAction;
import org.elasticsearch.rest.action.admin.indices.shards.RestIndicesShardStoresAction;
import org.elasticsearch.rest.action.admin.indices.stats.RestIndicesStatsAction;
import org.elasticsearch.rest.action.admin.indices.template.delete.RestDeleteIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.get.RestGetIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.head.RestHeadIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.template.put.RestPutIndexTemplateAction;
import org.elasticsearch.rest.action.admin.indices.upgrade.RestUpgradeAction;
import org.elasticsearch.rest.action.admin.indices.validate.query.RestValidateQueryAction;
import org.elasticsearch.rest.action.admin.indices.validate.template.RestRenderSearchTemplateAction;
import org.elasticsearch.rest.action.admin.indices.warmer.delete.RestDeleteWarmerAction;
import org.elasticsearch.rest.action.admin.indices.warmer.get.RestGetWarmerAction;
import org.elasticsearch.rest.action.admin.indices.warmer.put.RestPutWarmerAction;
import org.elasticsearch.rest.action.bulk.RestBulkAction;
import org.elasticsearch.rest.action.cat.AbstractCatAction;
import org.elasticsearch.rest.action.cat.RestAliasAction;
import org.elasticsearch.rest.action.cat.RestAllocationAction;
import org.elasticsearch.rest.action.cat.RestCatAction;
import org.elasticsearch.rest.action.cat.RestFielddataAction;
import org.elasticsearch.rest.action.cat.RestHealthAction;
import org.elasticsearch.rest.action.cat.RestIndicesAction;
import org.elasticsearch.rest.action.cat.RestMasterAction;
import org.elasticsearch.rest.action.cat.RestNodeAttrsAction;
import org.elasticsearch.rest.action.cat.RestNodesAction;
import org.elasticsearch.rest.action.cat.RestPluginsAction;
import org.elasticsearch.rest.action.cat.RestRepositoriesAction;
import org.elasticsearch.rest.action.cat.RestSegmentsAction;
import org.elasticsearch.rest.action.cat.RestShardsAction;
import org.elasticsearch.rest.action.cat.RestSnapshotAction;
import org.elasticsearch.rest.action.cat.RestThreadPoolAction;
import org.elasticsearch.rest.action.delete.RestDeleteAction;
import org.elasticsearch.rest.action.explain.RestExplainAction;
import org.elasticsearch.rest.action.fieldstats.RestFieldStatsAction;
import org.elasticsearch.rest.action.get.RestGetAction;
import org.elasticsearch.rest.action.get.RestGetSourceAction;
import org.elasticsearch.rest.action.get.RestHeadAction;
import org.elasticsearch.rest.action.get.RestMultiGetAction;
import org.elasticsearch.rest.action.index.RestIndexAction;
import org.elasticsearch.rest.action.main.RestMainAction;
import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction;
import org.elasticsearch.rest.action.percolate.RestPercolateAction;
import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction;
import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction;
import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction;
import org.elasticsearch.rest.action.search.RestClearScrollAction;
import org.elasticsearch.rest.action.search.RestMultiSearchAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.rest.action.search.RestSearchScrollAction;
import org.elasticsearch.rest.action.suggest.RestSuggestAction;
import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction;
import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction;
import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction;
import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
import org.elasticsearch.rest.action.update.RestUpdateAction;

import java.util.ArrayList;
import java.util.List;

/**
 *
 */
public class RestActionModule extends AbstractModule {
    private List<Class<? extends BaseRestHandler>> restPluginsActions = new ArrayList<>();

    public RestActionModule(List<Class<? extends BaseRestHandler>> restPluginsActions) {
        this.restPluginsActions = restPluginsActions;
    }

    @Override
    protected void configure() {
        for (Class<? extends BaseRestHandler> restAction : restPluginsActions) {
            bind(restAction).asEagerSingleton();
        }

        bind(RestMainAction.class).asEagerSingleton();

        bind(RestNodesInfoAction.class).asEagerSingleton();
        bind(RestNodesStatsAction.class).asEagerSingleton();
        bind(RestNodesHotThreadsAction.class).asEagerSingleton();
        bind(RestClusterStatsAction.class).asEagerSingleton();
        bind(RestClusterStateAction.class).asEagerSingleton();
        bind(RestClusterHealthAction.class).asEagerSingleton();
        bind(RestClusterUpdateSettingsAction.class).asEagerSingleton();
        bind(RestClusterGetSettingsAction.class).asEagerSingleton();
        bind(RestClusterRerouteAction.class).asEagerSingleton();
        bind(RestClusterSearchShardsAction.class).asEagerSingleton();
        bind(RestPendingClusterTasksAction.class).asEagerSingleton();
        bind(RestPutRepositoryAction.class).asEagerSingleton();
        bind(RestGetRepositoriesAction.class).asEagerSingleton();
        bind(RestDeleteRepositoryAction.class).asEagerSingleton();
        bind(RestVerifyRepositoryAction.class).asEagerSingleton();
        bind(RestGetSnapshotsAction.class).asEagerSingleton();
        bind(RestCreateSnapshotAction.class).asEagerSingleton();
        bind(RestRestoreSnapshotAction.class).asEagerSingleton();
        bind(RestDeleteSnapshotAction.class).asEagerSingleton();
        bind(RestSnapshotsStatusAction.class).asEagerSingleton();

        bind(RestIndicesExistsAction.class).asEagerSingleton();
        bind(RestTypesExistsAction.class).asEagerSingleton();
        bind(RestGetIndicesAction.class).asEagerSingleton();
        bind(RestIndicesStatsAction.class).asEagerSingleton();
        bind(RestIndicesSegmentsAction.class).asEagerSingleton();
        bind(RestIndicesShardStoresAction.class).asEagerSingleton();
        bind(RestGetAliasesAction.class).asEagerSingleton();
        bind(RestAliasesExistAction.class).asEagerSingleton();
        bind(RestIndexDeleteAliasesAction.class).asEagerSingleton();
        bind(RestIndexPutAliasAction.class).asEagerSingleton();
        bind(RestIndicesAliasesAction.class).asEagerSingleton();
        bind(RestGetIndicesAliasesAction.class).asEagerSingleton();
        bind(RestCreateIndexAction.class).asEagerSingleton();
        bind(RestDeleteIndexAction.class).asEagerSingleton();
        bind(RestCloseIndexAction.class).asEagerSingleton();
        bind(RestOpenIndexAction.class).asEagerSingleton();

        bind(RestUpdateSettingsAction.class).asEagerSingleton();
        bind(RestGetSettingsAction.class).asEagerSingleton();

        bind(RestAnalyzeAction.class).asEagerSingleton();
        bind(RestGetIndexTemplateAction.class).asEagerSingleton();
        bind(RestPutIndexTemplateAction.class).asEagerSingleton();
        bind(RestDeleteIndexTemplateAction.class).asEagerSingleton();
        bind(RestHeadIndexTemplateAction.class).asEagerSingleton();

        bind(RestPutWarmerAction.class).asEagerSingleton();
        bind(RestDeleteWarmerAction.class).asEagerSingleton();
        bind(RestGetWarmerAction.class).asEagerSingleton();

        bind(RestPutMappingAction.class).asEagerSingleton();
        bind(RestGetMappingAction.class).asEagerSingleton();
        bind(RestGetFieldMappingAction.class).asEagerSingleton();

        bind(RestRefreshAction.class).asEagerSingleton();
        bind(RestFlushAction.class).asEagerSingleton();
        bind(RestSyncedFlushAction.class).asEagerSingleton();
        bind(RestForceMergeAction.class).asEagerSingleton();
        bind(RestUpgradeAction.class).asEagerSingleton();
        bind(RestClearIndicesCacheAction.class).asEagerSingleton();

        bind(RestIndexAction.class).asEagerSingleton();
        bind(RestGetAction.class).asEagerSingleton();
        bind(RestGetSourceAction.class).asEagerSingleton();
        bind(RestHeadAction.class).asEagerSingleton();
        bind(RestMultiGetAction.class).asEagerSingleton();
        bind(RestDeleteAction.class).asEagerSingleton();
        bind(org.elasticsearch.rest.action.count.RestCountAction.class).asEagerSingleton();
        bind(RestSuggestAction.class).asEagerSingleton();
        bind(RestTermVectorsAction.class).asEagerSingleton();
        bind(RestMultiTermVectorsAction.class).asEagerSingleton();
        bind(RestBulkAction.class).asEagerSingleton();
        bind(RestUpdateAction.class).asEagerSingleton();
        bind(RestPercolateAction.class).asEagerSingleton();
        bind(RestMultiPercolateAction.class).asEagerSingleton();

        bind(RestSearchAction.class).asEagerSingleton();
        bind(RestSearchScrollAction.class).asEagerSingleton();
        bind(RestClearScrollAction.class).asEagerSingleton();
        bind(RestMultiSearchAction.class).asEagerSingleton();
        bind(RestRenderSearchTemplateAction.class).asEagerSingleton();

        bind(RestValidateQueryAction.class).asEagerSingleton();

        bind(RestExplainAction.class).asEagerSingleton();

        bind(RestRecoveryAction.class).asEagerSingleton();

        // Templates API
        bind(RestGetSearchTemplateAction.class).asEagerSingleton();
        bind(RestPutSearchTemplateAction.class).asEagerSingleton();
        bind(RestDeleteSearchTemplateAction.class).asEagerSingleton();

        // Scripts API
        bind(RestGetIndexedScriptAction.class).asEagerSingleton();
        bind(RestPutIndexedScriptAction.class).asEagerSingleton();
        bind(RestDeleteIndexedScriptAction.class).asEagerSingleton();


        bind(RestFieldStatsAction.class).asEagerSingleton();

        // cat API
        Multibinder<AbstractCatAction> catActionMultibinder = Multibinder.newSetBinder(binder(), AbstractCatAction.class);
        catActionMultibinder.addBinding().to(RestAllocationAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestShardsAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestMasterAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestNodesAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestIndicesAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestSegmentsAction.class).asEagerSingleton();
        // Fully qualified to prevent interference with rest.action.count.RestCountAction
        catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestCountAction.class).asEagerSingleton();
        // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction
        catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestRecoveryAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestHealthAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction.class).asEagerSingleton();
        catActionMultibinder.addBinding().to(RestAliasAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestThreadPoolAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestPluginsAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestFielddataAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestNodeAttrsAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestRepositoriesAction.class).asEagerSingleton();
|
||||
catActionMultibinder.addBinding().to(RestSnapshotAction.class).asEagerSingleton();
|
||||
// no abstract cat action
|
||||
bind(RestCatAction.class).asEagerSingleton();
|
||||
}
|
||||
}
|
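The module above binds every built-in REST handler as an eager singleton and collects the cat actions through a Guice Multibinder, which is what lets RestCatAction receive the complete group as a single injected set. A minimal, self-contained sketch of that multibinding pattern (plain Google Guice here; Elasticsearch uses a repackaged copy under org.elasticsearch.common.inject, and the CatHandler names below are illustrative, not from this commit):

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;

import java.util.Set;

public class MultibinderSketch {
    interface CatHandler {}
    static class AllocationHandler implements CatHandler {}
    static class ShardsHandler implements CatHandler {}

    static class HandlerModule extends AbstractModule {
        @Override
        protected void configure() {
            // Each addBinding() contributes one element to the injected Set<CatHandler>,
            // mirroring catActionMultibinder.addBinding().to(...) above.
            Multibinder<CatHandler> handlers = Multibinder.newSetBinder(binder(), CatHandler.class);
            handlers.addBinding().to(AllocationHandler.class).asEagerSingleton();
            handlers.addBinding().to(ShardsHandler.class).asEagerSingleton();
        }
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new HandlerModule());
        Set<CatHandler> all = injector.getInstance(Key.get(new TypeLiteral<Set<CatHandler>>() {}));
        System.out.println(all.size()); // 2: the whole group, analogous to what RestCatAction receives
    }
}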
@ -62,7 +62,7 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
}

@Override
public Object compile(String script) {
public Object compile(String script, Map<String, String> params) {
NativeScriptFactory scriptFactory = scripts.get(script);
if (scriptFactory != null) {
return scriptFactory;
@ -36,7 +36,7 @@ public interface ScriptEngineService extends Closeable {

boolean sandboxed();

Object compile(String script);
Object compile(String script, Map<String, String> params);

ExecutableScript executable(CompiledScript compiledScript, @Nullable Map<String, Object> vars);

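The interface change above threads compile-time parameters into every engine; NativeScriptEngineService simply ignores them, but an engine can use them to shape what it compiles. A hedged sketch of an implementation of the new two-argument contract (the UppercaseEngine name and its behavior are invented for illustration; only the compile signature comes from this diff):

import java.util.Locale;
import java.util.Map;

// Illustrative only: assumes the rest of ScriptEngineService (types(), executable(), ...) is implemented elsewhere.
class UppercaseEngine {
    /** Mirrors the new Object compile(String, Map<String, String>) contract. */
    public Object compile(String script, Map<String, String> params) {
        // Compile-time params may change what gets cached, which is exactly why
        // ScriptService now includes them in its cache key (see below).
        boolean trim = Boolean.parseBoolean(params.getOrDefault("trim", "false"));
        String source = trim ? script.trim() : script;
        return source.toUpperCase(Locale.ROOT);
    }
}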
@ -67,6 +67,7 @@ import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
@ -96,9 +97,9 @@ public class ScriptService extends AbstractComponent implements Closeable {
private final Map<String, ScriptEngineService> scriptEnginesByLang;
private final Map<String, ScriptEngineService> scriptEnginesByExt;

private final ConcurrentMap<String, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();
private final ConcurrentMap<CacheKey, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();

private final Cache<String, CompiledScript> cache;
private final Cache<CacheKey, CompiledScript> cache;
private final Path scriptsDirectory;

private final ScriptModes scriptModes;
@ -153,7 +154,7 @@ public class ScriptService extends AbstractComponent implements Closeable {

this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, DEFAULT_LANG);

CacheBuilder<String, CompiledScript> cacheBuilder = CacheBuilder.builder();
CacheBuilder<CacheKey, CompiledScript> cacheBuilder = CacheBuilder.builder();
if (cacheMaxSize >= 0) {
cacheBuilder.setMaximumWeight(cacheMaxSize);
}
@ -224,7 +225,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
/**
* Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script.
*/
public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
if (script == null) {
throw new IllegalArgumentException("The parameter script (Script) must not be null.");
}
@ -252,14 +253,14 @@ public class ScriptService extends AbstractComponent implements Closeable {
" operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
}

return compileInternal(script, headersContext);
return compileInternal(script, headersContext, params);
}

/**
* Compiles a script straight-away, or returns the previously compiled and cached script,
* without checking if it can be executed based on settings.
*/
public CompiledScript compileInternal(Script script, HasContextAndHeaders context) {
public CompiledScript compileInternal(Script script, HasContextAndHeaders context, Map<String, String> params) {
if (script == null) {
throw new IllegalArgumentException("The parameter script (Script) must not be null.");
}
@ -277,7 +278,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang);

if (type == ScriptType.FILE) {
String cacheKey = getCacheKey(scriptEngineService, name, null);
CacheKey cacheKey = new CacheKey(scriptEngineService, name, null, params);
//On disk scripts will be loaded into the staticCache by the listener
CompiledScript compiledScript = staticCache.get(cacheKey);

@ -299,14 +300,14 @@ public class ScriptService extends AbstractComponent implements Closeable {
code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context);
}

String cacheKey = getCacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code);
CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code, params);
CompiledScript compiledScript = cache.get(cacheKey);

if (compiledScript == null) {
//Either an un-cached inline script or indexed script
//If the script type is inline the name will be the same as the code for identification in exceptions
try {
compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code));
compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(code, params));
} catch (Exception exception) {
throw new ScriptException("Failed to compile " + type + " script [" + name + "] using lang [" + lang + "]", exception);
}
@ -364,7 +365,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
//we don't know yet what the script will be used for, but if all of the operations for this lang with
//indexed scripts are disabled, it makes no sense to even compile it.
if (isAnyScriptContextEnabled(scriptLang, scriptEngineService, ScriptType.INDEXED)) {
Object compiled = scriptEngineService.compile(template.getScript());
Object compiled = scriptEngineService.compile(template.getScript(), Collections.emptyMap());
if (compiled == null) {
throw new IllegalArgumentException("Unable to parse [" + template.getScript() +
"] lang [" + scriptLang + "] (ScriptService.compile returned null)");
@ -436,8 +437,8 @@ public class ScriptService extends AbstractComponent implements Closeable {
/**
* Compiles (or retrieves from cache) and executes the provided script
*/
public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
return executable(compile(script, scriptContext, headersContext), script.getParams());
public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
return executable(compile(script, scriptContext, headersContext, params), script.getParams());
}

/**
@ -450,8 +451,8 @@ public class ScriptService extends AbstractComponent implements Closeable {
/**
* Compiles (or retrieves from cache) and executes the provided search script
*/
public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext) {
CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current());
public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map<String, String> params) {
CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current(), params);
return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams());
}

@ -491,9 +492,9 @@ public class ScriptService extends AbstractComponent implements Closeable {
* {@code ScriptEngineService}'s {@code scriptRemoved} method when the
* script has been removed from the cache
*/
private class ScriptCacheRemovalListener implements RemovalListener<String, CompiledScript> {
private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
@Override
public void onRemoval(RemovalNotification<String, CompiledScript> notification) {
public void onRemoval(RemovalNotification<CacheKey, CompiledScript> notification) {
scriptMetrics.onCacheEviction();
for (ScriptEngineService service : scriptEngines) {
try {
@ -539,8 +540,8 @@ public class ScriptService extends AbstractComponent implements Closeable {
logger.info("compiling script file [{}]", file.toAbsolutePath());
try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
String script = Streams.copyToString(reader);
String cacheKey = getCacheKey(engineService, scriptNameExt.v1(), null);
staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script)));
CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap());
staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script, Collections.emptyMap())));
scriptMetrics.onCompilation();
}
} else {
@ -565,7 +566,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
assert engineService != null;
logger.info("removing script file [{}]", file.toAbsolutePath());
staticCache.remove(getCacheKey(engineService, scriptNameExt.v1(), null));
staticCache.remove(new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap()));
}
}

@ -625,11 +626,44 @@ public class ScriptService extends AbstractComponent implements Closeable {
}
}

private static String getCacheKey(ScriptEngineService scriptEngineService, String name, String code) {
String lang = scriptEngineService.types()[0];
return lang + ":" + (name != null ? ":" + name : "") + (code != null ? ":" + code : "");
private static final class CacheKey {
final String lang;
final String name;
final String code;
final Map<String, String> params;

private CacheKey(final ScriptEngineService service, final String name, final String code, final Map<String, String> params) {
this.lang = service.types()[0];
this.name = name;
this.code = code;
this.params = params;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

CacheKey cacheKey = (CacheKey)o;

if (!lang.equals(cacheKey.lang)) return false;
if (name != null ? !name.equals(cacheKey.name) : cacheKey.name != null) return false;
if (code != null ? !code.equals(cacheKey.code) : cacheKey.code != null) return false;
return params.equals(cacheKey.params);

}

@Override
public int hashCode() {
int result = lang.hashCode();
result = 31 * result + (name != null ? name.hashCode() : 0);
result = 31 * result + (code != null ? code.hashCode() : 0);
result = 31 * result + params.hashCode();
return result;
}
}


private static class IndexedScript {
private final String lang;
private final String id;
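The move from the old colon-joined getCacheKey string to the CacheKey value class is a correctness fix, not just style: the string form could not distinguish two compilations of the same source under different compile-time params, and a name or code containing ':' could even collide with an unrelated entry. A standalone sketch of the new identity semantics (the Key class below is an analogue of the private CacheKey, written out so it compiles on its own):

import java.util.Collections;
import java.util.Map;
import java.util.Objects;

// Standalone analogue of the private CacheKey above, to show why params belong in the key.
final class Key {
    final String lang, name, code;
    final Map<String, String> params;
    Key(String lang, String name, String code, Map<String, String> params) {
        this.lang = lang; this.name = name; this.code = code; this.params = params;
    }
    @Override public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Key)) return false;
        Key k = (Key) o;
        return lang.equals(k.lang) && Objects.equals(name, k.name)
                && Objects.equals(code, k.code) && params.equals(k.params);
    }
    @Override public int hashCode() {
        return Objects.hash(lang, name, code, params);
    }

    public static void main(String[] args) {
        Key plain = new Key("expression", "my_script", null, Collections.emptyMap());
        Key tuned = new Key("expression", "my_script", null, Collections.singletonMap("strict", "true"));
        // Same script, different compile params -> different cache entries, no stale reuse.
        System.out.println(plain.equals(tuned)); // false
    }
}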
@ -92,6 +92,7 @@ import org.elasticsearch.search.warmer.IndexWarmersMetaData;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
@ -555,7 +556,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
context.scrollContext().scroll = request.scroll();
}
if (request.template() != null) {
ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context);
ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context, Collections.emptyMap());
BytesReference run = (BytesReference) executable.run();
try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry());
@ -830,7 +831,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
if (source.scriptFields() != null) {
for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH);
SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH, Collections.emptyMap());
context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
}
}
@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -82,7 +83,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {

@Override
public void initialize(InternalAggregation.ReduceContext context) {
searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context);
searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
searchScript.setNextVar("_subset_freq", subsetDfHolder);
searchScript.setNextVar("_subset_size", subsetSizeHolder);
searchScript.setNextVar("_superset_freq", supersetDfHolder);
@ -170,7 +171,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
}
ExecutableScript searchScript;
try {
searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context);
searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName);
}
@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -91,7 +92,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
vars.putAll(firstAggregation.reduceScript.getParams());
}
CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript,
ScriptContext.Standard.AGGS, reduceContext);
ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars);
aggregation = script.run();
} else {
@ -39,6 +39,7 @@ import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -58,11 +59,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
this.params = params;
ScriptService scriptService = context.searchContext().scriptService();
if (initScript != null) {
scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext()).run();
scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()).run();
}
this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS);
this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS, Collections.emptyMap());
if (combineScript != null) {
this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext());
this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap());
} else {
this.combineScript = null;
}
@ -90,7 +90,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends Bucket> buckets = originalAgg.getBuckets();

CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext);
CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
List newBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
Map<String, Object> vars = new HashMap<>();
@ -38,6 +38,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -88,7 +89,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator {
InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends Bucket> buckets = originalAgg.getBuckets();

CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext);
CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
List newBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
Map<String, Object> vars = new HashMap<>();
@ -43,6 +43,7 @@ import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -227,7 +228,7 @@ public class ValuesSourceParser<VS extends ValuesSource> {
}

private SearchScript createScript() {
return input.script == null ? null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS);
return input.script == null ? null : context.scriptService().search(context.lookup(), input.script, ScriptContext.Standard.AGGS, Collections.emptyMap());
}

private static ValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType) {
@ -30,6 +30,7 @@ import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -97,9 +98,9 @@ public class ScriptFieldsParseElement implements SearchParseElement {
throw new SearchParseException(context, "must specify a script in script fields", parser.getTokenLocation());
}

SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript, ignoreException));
}
}
}
}
}
@ -50,6 +50,7 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -130,7 +131,7 @@ public class ScriptSortParser implements SortParser {
if (type == null) {
throw new SearchParseException(context, "_script sorting requires setting the type of the script", parser.getTokenLocation());
}
final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
final SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());

if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
throw new SearchParseException(context, "type [string] doesn't support mode [" + sortMode + "]", parser.getTokenLocation());
@ -39,6 +39,7 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;

import java.io.IOException;
import java.util.Collections;

public final class PhraseSuggestParser implements SuggestContextParser {

@ -143,7 +144,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}
Template template = Template.parse(parser, parseFieldMatcher);
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
headersContext);
headersContext, Collections.emptyMap());
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
suggestion.setCollateScriptParams(parser.map());
@ -1,122 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.transport;

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.transport.local.LocalTransport;
import org.elasticsearch.transport.netty.NettyTransport;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
*
*/
public class TransportModule extends AbstractModule {

public static final String TRANSPORT_TYPE_KEY = "transport.type";
public static final String TRANSPORT_SERVICE_TYPE_KEY = "transport.service.type";

public static final String LOCAL_TRANSPORT = "local";
public static final String NETTY_TRANSPORT = "netty";

private final ESLogger logger;
private final Settings settings;

private final Map<String, Class<? extends TransportService>> transportServices = new HashMap<>();
private final Map<String, Class<? extends Transport>> transports = new HashMap<>();
private Class<? extends TransportService> configuredTransportService;
private Class<? extends Transport> configuredTransport;
private String configuredTransportServiceSource;
private String configuredTransportSource;

public TransportModule(Settings settings) {
this.settings = settings;
this.logger = Loggers.getLogger(getClass(), settings);
addTransport(LOCAL_TRANSPORT, LocalTransport.class);
addTransport(NETTY_TRANSPORT, NettyTransport.class);
}

public void addTransportService(String name, Class<? extends TransportService> clazz) {
Class<? extends TransportService> oldClazz = transportServices.put(name, clazz);
if (oldClazz != null) {
throw new IllegalArgumentException("Cannot register TransportService [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName());
}
}

public void addTransport(String name, Class<? extends Transport> clazz) {
Class<? extends Transport> oldClazz = transports.put(name, clazz);
if (oldClazz != null) {
throw new IllegalArgumentException("Cannot register Transport [" + name + "] to " + clazz.getName() + ", already registered to " + oldClazz.getName());
}
}

@Override
protected void configure() {
if (configuredTransportService != null) {
logger.info("Using [{}] as transport service, overridden by [{}]", configuredTransportService.getName(), configuredTransportServiceSource);
bind(TransportService.class).to(configuredTransportService).asEagerSingleton();
} else {
String typeName = settings.get(TRANSPORT_SERVICE_TYPE_KEY);
if (typeName == null) {
bind(TransportService.class).asEagerSingleton();
} else {
if (transportServices.containsKey(typeName) == false) {
throw new IllegalArgumentException("Unknown TransportService type [" + typeName + "], known types are: " + transportServices.keySet());
}
bind(TransportService.class).to(transportServices.get(typeName)).asEagerSingleton();
}
}

bind(NamedWriteableRegistry.class).asEagerSingleton();
if (configuredTransport != null) {
logger.info("Using [{}] as transport, overridden by [{}]", configuredTransport.getName(), configuredTransportSource);
bind(Transport.class).to(configuredTransport).asEagerSingleton();
} else {
String defaultType = DiscoveryNode.localNode(settings) ? LOCAL_TRANSPORT : NETTY_TRANSPORT;
String typeName = settings.get(TRANSPORT_TYPE_KEY, defaultType);
Class<? extends Transport> clazz = transports.get(typeName);
if (clazz == null) {
throw new IllegalArgumentException("Unknown Transport [" + typeName + "]");
}
bind(Transport.class).to(clazz).asEagerSingleton();
}
}

public void setTransportService(Class<? extends TransportService> transportService, String source) {
Objects.requireNonNull(transportService, "Configured transport service may not be null");
Objects.requireNonNull(source, "Plugin, that changes transport service may not be null");
this.configuredTransportService = transportService;
this.configuredTransportServiceSource = source;
}

public void setTransport(Class<? extends Transport> transport, String source) {
Objects.requireNonNull(transport, "Configured transport may not be null");
Objects.requireNonNull(source, "Plugin, that changes transport may not be null");
this.configuredTransport = transport;
this.configuredTransportSource = source;
}
}
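TransportModule is deleted outright in this commit; the test changes further down show where its responsibilities went. Plugins now register custom transports through NetworkModule and select them via NetworkModule.TRANSPORT_SERVICE_TYPE_KEY. A before/after sketch of the plugin hook, inferred from the TransportClientHeadersTests diff below (InternalTransportService stands in for any plugin-supplied service):

// Before (the hook removed with this file):
public void onModule(TransportModule transportModule) {
    transportModule.addTransportService("internal", InternalTransportService.class);
}

// After (the shape exercised by TransportClientHeadersTests below):
public void onModule(NetworkModule networkModule) {
    networkModule.registerTransportService("internal", InternalTransportService.class);
}

// The plugin still opts in through settings; only the key's owner moved:
Settings additionalSettings = Settings.builder()
        .put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "internal")
        .build();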
@ -0,0 +1,164 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matcher;

import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 2)
public class BulkProcessorRetryIT extends ESIntegTestCase {
private static final String INDEX_NAME = "test";
private static final String TYPE_NAME = "type";

@Override
protected Settings nodeSettings(int nodeOrdinal) {
//Have very low pool and queue sizes to overwhelm internal pools easily
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put("threadpool.generic.size", 1)
.put("threadpool.generic.queue_size", 1)
// don't mess with this one! It's quite sensitive to a low queue size
// (see also ThreadedActionListener which is happily spawning threads even when we already got rejected)
//.put("threadpool.listener.queue_size", 1)
.put("threadpool.get.queue_size", 1)
// default is 50
.put("threadpool.bulk.queue_size", 20)
.build();
}


public void testBulkRejectionLoadWithoutBackoff() throws Throwable {
boolean rejectedExecutionExpected = true;
executeBulkRejectionLoad(BackoffPolicy.noBackoff(), rejectedExecutionExpected);
}

public void testBulkRejectionLoadWithBackoff() throws Throwable {
boolean rejectedExecutionExpected = false;
executeBulkRejectionLoad(BackoffPolicy.exponentialBackoff(), rejectedExecutionExpected);
}

private void executeBulkRejectionLoad(BackoffPolicy backoffPolicy, boolean rejectedExecutionExpected) throws Throwable {
int numberOfAsyncOps = randomIntBetween(600, 700);
final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps);
final Set<Object> responses = Collections.newSetFromMap(new ConcurrentHashMap<>());

assertAcked(prepareCreate(INDEX_NAME));
ensureGreen();

BulkProcessor bulkProcessor = BulkProcessor.builder(client(), new BulkProcessor.Listener() {
@Override
public void beforeBulk(long executionId, BulkRequest request) {
// no op
}

@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
responses.add(response);
latch.countDown();
}

@Override
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
responses.add(failure);
latch.countDown();
}
}).setBulkActions(1)
// zero means that we're in the sync case, more means that we're in the async case
.setConcurrentRequests(randomIntBetween(0, 100))
.setBackoffPolicy(backoffPolicy)
.build();
indexDocs(bulkProcessor, numberOfAsyncOps);
latch.await(10, TimeUnit.SECONDS);
bulkProcessor.close();

assertThat(responses.size(), equalTo(numberOfAsyncOps));

// validate all responses
for (Object response : responses) {
if (response instanceof BulkResponse) {
BulkResponse bulkResponse = (BulkResponse) response;
for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) {
if (bulkItemResponse.isFailed()) {
BulkItemResponse.Failure failure = bulkItemResponse.getFailure();
Throwable rootCause = ExceptionsHelper.unwrapCause(failure.getCause());
if (rootCause instanceof EsRejectedExecutionException) {
if (rejectedExecutionExpected == false) {
// we're not expecting that we overwhelmed it even once
throw new AssertionError("Unexpected failure reason", rootCause);
}
} else {
throw new AssertionError("Unexpected failure", rootCause);
}
}
}
} else {
Throwable t = (Throwable) response;
// we're not expecting any other errors
throw new AssertionError("Unexpected failure", t);
}
}

client().admin().indices().refresh(new RefreshRequest()).get();

// validate we did not create any duplicates due to retries
Matcher<Long> searchResultCount;
if (rejectedExecutionExpected) {
// it is ok if we lost some index operations to rejected executions
searchResultCount = lessThanOrEqualTo((long) numberOfAsyncOps);
} else {
searchResultCount = equalTo((long) numberOfAsyncOps);
}

SearchResponse results = client()
.prepareSearch(INDEX_NAME)
.setTypes(TYPE_NAME)
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0)
.get();
assertThat(results.getHits().totalHits(), searchResultCount);
}

private static void indexDocs(BulkProcessor processor, int numDocs) {
for (int i = 1; i <= numDocs; i++) {
processor.add(client()
.prepareIndex()
.setIndex(INDEX_NAME)
.setType(TYPE_NAME)
.setId(Integer.toString(i))
.setSource("field", randomRealisticUnicodeOfLengthBetween(1, 30))
.request());
}
}
}
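The test above floods a deliberately tiny bulk thread pool and checks that a backoff policy turns EsRejectedExecutionException into retries instead of failures. In application code the same behavior hangs off the builder; a minimal sketch, assuming an existing Client (the listener bodies are placeholders):

import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;

class BulkWithBackoff {
    static BulkProcessor build(Client client) {
        return BulkProcessor.builder(client, new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {}

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {}

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                // With a backoff policy set, this fires only once retries are exhausted.
            }
        })
                .setBulkActions(500)
                // Same policy the happy-path test uses; noBackoff() restores the old fail-fast behavior.
                .setBackoffPolicy(BackoffPolicy.exponentialBackoff())
                .build();
    }
}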
219
core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
Normal file
@ -0,0 +1,219 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.rest.NoOpClient;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;

import java.util.concurrent.CountDownLatch;

import static org.hamcrest.Matchers.*;

public class RetryTests extends ESTestCase {
// no need to wait for a long time in tests
private static final TimeValue DELAY = TimeValue.timeValueMillis(1L);
private static final int CALLS_TO_FAIL = 5;

private MockBulkClient bulkClient;

@Override
@Before
public void setUp() throws Exception {
super.setUp();
this.bulkClient = new MockBulkClient(getTestName(), CALLS_TO_FAIL);
}

@Override
@After
public void tearDown() throws Exception {
super.tearDown();
this.bulkClient.close();
}

private BulkRequest createBulkRequest() {
BulkRequest request = new BulkRequest();
request.add(new UpdateRequest("shop", "products", "1"));
request.add(new UpdateRequest("shop", "products", "2"));
request.add(new UpdateRequest("shop", "products", "3"));
request.add(new UpdateRequest("shop", "products", "4"));
request.add(new UpdateRequest("shop", "products", "5"));
return request;
}

public void testSyncRetryBacksOff() throws Exception {
BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL);

BulkRequest bulkRequest = createBulkRequest();
BulkResponse response = Retry
.on(EsRejectedExecutionException.class)
.policy(backoff)
.withSyncBackoff(bulkClient, bulkRequest);

assertFalse(response.hasFailures());
assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions()));
}

public void testSyncRetryFailsAfterBackoff() throws Exception {
BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1);

BulkRequest bulkRequest = createBulkRequest();
BulkResponse response = Retry
.on(EsRejectedExecutionException.class)
.policy(backoff)
.withSyncBackoff(bulkClient, bulkRequest);

assertTrue(response.hasFailures());
assertThat(response.getItems().length, equalTo(bulkRequest.numberOfActions()));
}

public void testAsyncRetryBacksOff() throws Exception {
BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL);
AssertingListener listener = new AssertingListener();

BulkRequest bulkRequest = createBulkRequest();
Retry.on(EsRejectedExecutionException.class)
.policy(backoff)
.withAsyncBackoff(bulkClient, bulkRequest, listener);

listener.awaitCallbacksCalled();
listener.assertOnResponseCalled();
listener.assertResponseWithoutFailures();
listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions());
listener.assertOnFailureNeverCalled();
}

public void testAsyncRetryFailsAfterBacksOff() throws Exception {
BackoffPolicy backoff = BackoffPolicy.constantBackoff(DELAY, CALLS_TO_FAIL - 1);
AssertingListener listener = new AssertingListener();

BulkRequest bulkRequest = createBulkRequest();
Retry.on(EsRejectedExecutionException.class)
.policy(backoff)
.withAsyncBackoff(bulkClient, bulkRequest, listener);

listener.awaitCallbacksCalled();

listener.assertOnResponseCalled();
listener.assertResponseWithFailures();
listener.assertResponseWithNumberOfItems(bulkRequest.numberOfActions());
listener.assertOnFailureNeverCalled();
}

private static class AssertingListener implements ActionListener<BulkResponse> {
private final CountDownLatch latch;
private int countOnResponseCalled = 0;
private Throwable lastFailure;
private BulkResponse response;

private AssertingListener() {
latch = new CountDownLatch(1);
}

public void awaitCallbacksCalled() throws InterruptedException {
latch.await();
}

@Override
public void onResponse(BulkResponse bulkItemResponses) {
latch.countDown();
this.response = bulkItemResponses;
countOnResponseCalled++;
}

@Override
public void onFailure(Throwable e) {
latch.countDown();
this.lastFailure = e;
}

public void assertOnResponseCalled() {
assertThat(countOnResponseCalled, equalTo(1));
}

public void assertResponseWithNumberOfItems(int numItems) {
assertThat(response.getItems().length, equalTo(numItems));
}

public void assertResponseWithoutFailures() {
assertThat(response, notNullValue());
assertFalse("Response should not have failures", response.hasFailures());
}

public void assertResponseWithFailures() {
assertThat(response, notNullValue());
assertTrue("Response should have failures", response.hasFailures());
}

public void assertOnFailureNeverCalled() {
assertThat(lastFailure, nullValue());
}
}

private static class MockBulkClient extends NoOpClient {
private int numberOfCallsToFail;

private MockBulkClient(String testName, int numberOfCallsToFail) {
super(testName);
this.numberOfCallsToFail = numberOfCallsToFail;
}

@Override
public ActionFuture<BulkResponse> bulk(BulkRequest request) {
PlainActionFuture<BulkResponse> responseFuture = new PlainActionFuture<>();
bulk(request, responseFuture);
return responseFuture;
}

@Override
public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) {
// do everything synchronously, that's fine for a test
boolean shouldFail = numberOfCallsToFail > 0;
numberOfCallsToFail--;

BulkItemResponse[] itemResponses = new BulkItemResponse[request.requests().size()];
// if we have to fail, we need to fail at least once "reliably", the rest can be random
int itemToFail = randomInt(request.requests().size() - 1);
for (int idx = 0; idx < request.requests().size(); idx++) {
if (shouldFail && (randomBoolean() || idx == itemToFail)) {
itemResponses[idx] = failedResponse();
} else {
itemResponses[idx] = successfulResponse();
}
}
listener.onResponse(new BulkResponse(itemResponses, 1000L));
}

private BulkItemResponse successfulResponse() {
return new BulkItemResponse(1, "update", new DeleteResponse());
}

private BulkItemResponse failedResponse() {
return new BulkItemResponse(1, "update", new BulkItemResponse.Failure("test", "test", "1", new EsRejectedExecutionException("pool full")));
}
}
}
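For callers that issue a single BulkRequest rather than going through BulkProcessor, the Retry helper the tests exercise can be used directly. A sketch of the synchronous flavor, assuming an existing Client (the async variant takes an ActionListener<BulkResponse> as the third argument, as in the tests above):

import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.bulk.Retry;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;

class RetriedBulk {
    static BulkResponse execute(Client client, BulkRequest request) throws Exception {
        BulkResponse response = Retry
                .on(EsRejectedExecutionException.class) // retry only queue rejections
                .policy(BackoffPolicy.exponentialBackoff())
                .withSyncBackoff(client, request);
        if (response.hasFailures()) {
            // Items can still fail once the policy is exhausted; inspect response.getItems().
        }
        return response;
    }
}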
@ -32,6 +32,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@ -40,7 +41,6 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
@ -114,12 +114,12 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase {
public String description() {
return "a mock transport service";
}
public void onModule(TransportModule transportModule) {
transportModule.addTransportService("internal", InternalTransportService.class);
public void onModule(NetworkModule transportModule) {
transportModule.registerTransportService("internal", InternalTransportService.class);
}
@Override
public Settings additionalSettings() {
return Settings.builder().put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build();
return Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "internal").build();
}
}

@ -43,7 +43,14 @@ import org.elasticsearch.test.MockLogAppender;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
@ -53,7 +60,11 @@ import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
*
@ -753,18 +764,30 @@ public class ClusterServiceIT extends ESIntegTestCase {

class TaskExecutor implements ClusterStateTaskExecutor<Task> {
private AtomicInteger counter = new AtomicInteger();
private AtomicInteger batches = new AtomicInteger();
private AtomicInteger published = new AtomicInteger();

@Override
public BatchResult<Task> execute(ClusterState currentState, List<Task> tasks) throws Exception {
tasks.forEach(task -> task.execute());
counter.addAndGet(tasks.size());
return BatchResult.<Task>builder().successes(tasks).build(currentState);
ClusterState maybeUpdatedClusterState = currentState;
if (randomBoolean()) {
maybeUpdatedClusterState = ClusterState.builder(currentState).build();
batches.incrementAndGet();
}
return BatchResult.<Task>builder().successes(tasks).build(maybeUpdatedClusterState);
}

@Override
public boolean runOnlyOnMaster() {
return false;
}

@Override
public void clusterStatePublished(ClusterState newClusterState) {
published.incrementAndGet();
}
}
int numberOfThreads = randomIntBetween(2, 8);
int tasksSubmittedPerThread = randomIntBetween(1, 1024);
@ -838,6 +861,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
for (TaskExecutor executor : executors) {
if (counts.containsKey(executor)) {
assertEquals((int) counts.get(executor), executor.counter.get());
assertEquals(executor.batches.get(), executor.published.get());
}
}

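The reworked TaskExecutor encodes the contract being tested: clusterStatePublished fires once per batch whose execute() actually produced a new cluster state, so the batches and published counters must match. A trimmed sketch of that contract for a custom executor (method names as used in the test above; import locations are assumed from this commit's tree):

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;

import java.util.List;

class NoopExecutor implements ClusterStateTaskExecutor<String> {
    @Override
    public BatchResult<String> execute(ClusterState currentState, List<String> tasks) throws Exception {
        // Returning the identical instance signals "no change": nothing is published.
        return BatchResult.<String>builder().successes(tasks).build(currentState);
    }

    @Override
    public boolean runOnlyOnMaster() {
        return true;
    }

    @Override
    public void clusterStatePublished(ClusterState newClusterState) {
        // Never invoked for this executor, since execute() never builds a new state.
    }
}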
@ -60,6 +60,24 @@ public abstract class ModuleTestCase extends ESTestCase {
fail("Did not find any binding to " + to.getName() + ". Found these bindings:\n" + s);
}

/** Configures the module and asserts "clazz" is not bound to anything. */
public void assertNotBound(Module module, Class clazz) {
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding) element;
if (clazz.equals(binding.getKey().getTypeLiteral().getType())) {
fail("Found binding for " + clazz.getName() + " to " + binding.getKey().getTypeLiteral().getType().getTypeName());
}
} else if (element instanceof UntargettedBinding) {
UntargettedBinding binding = (UntargettedBinding) element;
if (clazz.equals(binding.getKey().getTypeLiteral().getType())) {
fail("Found binding for " + clazz.getName());
}
}
}
}

/**
* Attempts to configure the module, and asserts an {@link IllegalArgumentException} is
* caught, containing the given messages
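assertNotBound is the negative counterpart to the assertBinding helper used throughout NetworkModuleTests below. A hypothetical use (this exact test is not part of the commit): verifying that a transport-client NetworkModule skips HTTP wiring entirely:

public void testHttpNotBoundForTransportClient() {
    Settings settings = Settings.EMPTY;
    // transportClient == true, so no HTTP transport should be configured.
    NetworkModule module = new NetworkModule(new NetworkService(settings), settings, true);
    assertNotBound(module, HttpServerTransport.class);
}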
@ -0,0 +1,176 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.network;

import org.elasticsearch.client.Client;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.http.HttpServerAdapter;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.HttpStats;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.cat.AbstractCatAction;
import org.elasticsearch.rest.action.cat.RestNodesAction;
import org.elasticsearch.rest.action.main.RestMainAction;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;

public class NetworkModuleTests extends ModuleTestCase {

    static class FakeTransportService extends TransportService {
        public FakeTransportService() {
            super(null, null);
        }
    }

    static class FakeTransport extends AssertingLocalTransport {
        public FakeTransport() {
            super(null, null, null, null);
        }
    }

    static class FakeHttpTransport extends AbstractLifecycleComponent<HttpServerTransport> implements HttpServerTransport {
        public FakeHttpTransport() {
            super(null);
        }
        @Override
        protected void doStart() {}
        @Override
        protected void doStop() {}
        @Override
        protected void doClose() {}
        @Override
        public BoundTransportAddress boundAddress() {
            return null;
        }
        @Override
        public HttpInfo info() {
            return null;
        }
        @Override
        public HttpStats stats() {
            return null;
        }
        @Override
        public void httpServerAdapter(HttpServerAdapter httpServerAdapter) {}
    }

    static class FakeRestHandler extends BaseRestHandler {
        public FakeRestHandler() {
            super(null, null, null);
        }
        @Override
        protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {}
    }

    static class FakeCatRestHandler extends AbstractCatAction {
        public FakeCatRestHandler() {
            super(null, null, null);
        }
        @Override
        protected void doRequest(RestRequest request, RestChannel channel, Client client) {}
        @Override
        protected void documentation(StringBuilder sb) {}
        @Override
        protected Table getTableWithHeader(RestRequest request) {
            return null;
        }
    }

    public void testRegisterTransportService() {
        Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build();
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        module.registerTransportService("custom", FakeTransportService.class);
        assertBinding(module, TransportService.class, FakeTransportService.class);

        // check it works with transport only as well
        module = new NetworkModule(new NetworkService(settings), settings, true);
        module.registerTransportService("custom", FakeTransportService.class);
        assertBinding(module, TransportService.class, FakeTransportService.class);
    }

    public void testRegisterTransport() {
        Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build();
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        module.registerTransport("custom", FakeTransport.class);
        assertBinding(module, Transport.class, FakeTransport.class);

        // check it works with transport only as well
        module = new NetworkModule(new NetworkService(settings), settings, true);
        module.registerTransport("custom", FakeTransport.class);
        assertBinding(module, Transport.class, FakeTransport.class);
    }

    public void testRegisterHttpTransport() {
        Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_KEY, "custom").build();
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        module.registerHttpTransport("custom", FakeHttpTransport.class);
        assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class);

        // check registration not allowed for transport only
        module = new NetworkModule(new NetworkService(settings), settings, true);
        try {
            module.registerHttpTransport("custom", FakeHttpTransport.class);
            fail();
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("Cannot register http transport"));
            assertTrue(e.getMessage().contains("for transport client"));
        }

        // not added if http is disabled
        settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build();
        module = new NetworkModule(new NetworkService(settings), settings, false);
        assertNotBound(module, HttpServerTransport.class);
    }

    public void testRegisterRestHandler() {
        Settings settings = Settings.EMPTY;
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        module.registerRestHandler(FakeRestHandler.class);
        // also check a builtin is bound
        assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class);

        // check registration not allowed for transport only
        module = new NetworkModule(new NetworkService(settings), settings, true);
        try {
            module.registerRestHandler(FakeRestHandler.class);
            fail();
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("Cannot register rest handler"));
            assertTrue(e.getMessage().contains("for transport client"));
        }
    }

    public void testRegisterCatRestHandler() {
        Settings settings = Settings.EMPTY;
        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false);
        module.registerRestHandler(FakeCatRestHandler.class);
        // also check a builtin is bound
        assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class);
    }
}
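
These tests exercise the registration hooks that plugins call from an onModule(NetworkModule) method. A hedged sketch of such a plugin, modeled on the registration calls shown above and on the plugin hooks changed later in this diff (the plugin name and the MyTransport class are hypothetical):

    public class MyTransportPlugin extends Plugin {
        @Override
        public String name() {
            return "my-transport-plugin";
        }

        @Override
        public String description() {
            return "registers a custom transport under the key \"my-transport\"";
        }

        // invoked by the plugin infrastructure; nodes then select the implementation
        // by setting NetworkModule.TRANSPORT_TYPE_KEY to "my-transport"
        public void onModule(NetworkModule module) {
            module.registerTransport("my-transport", MyTransport.class); // MyTransport is hypothetical
        }
    }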
@ -81,7 +81,7 @@ public class ExternalMapper extends FieldMapper {
        private String mapperName;

        public Builder(String name, String generatedValue, String mapperName) {
            super(name, new ExternalFieldType());
            super(name, new ExternalFieldType(), new ExternalFieldType());
            this.builder = this;
            this.stringBuilder = stringField(name).store(false);
            this.generatedValue = generatedValue;

@ -98,7 +98,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
    public static class Builder extends MetadataFieldMapper.Builder<Builder, ExternalMetadataMapper> {

        protected Builder() {
            super(CONTENT_TYPE, FIELD_TYPE);
            super(CONTENT_TYPE, FIELD_TYPE, FIELD_TYPE);
        }

        @Override

@ -204,7 +204,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {

        @Override
        public Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            return new MetadataFieldMapper.Builder<Builder, DummyMetadataFieldMapper>("_dummy", FIELD_TYPE) {
            return new MetadataFieldMapper.Builder<Builder, DummyMetadataFieldMapper>("_dummy", FIELD_TYPE, FIELD_TYPE) {
                @Override
                public DummyMetadataFieldMapper build(BuilderContext context) {
                    return new DummyMetadataFieldMapper(context.indexSettings());
@ -23,8 +23,8 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
@ -46,11 +46,12 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.MergePolicyConfig;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.ESIntegTestCase;

import java.io.IOException;
import java.util.EnumSet;
@ -315,7 +316,7 @@ public class IndexStatsIT extends ESIntegTestCase {
                .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1")
                .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1")
                .put("index.merge.policy.type", "tiered")

                .put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, "ASYNC")
                ));
        ensureGreen();
        long termUpto = 0;
@ -1,112 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.plugins;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;

import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

/**
 *
 */
@ClusterScope(scope = Scope.SUITE, numDataNodes = 2)
public class PluggableTransportModuleIT extends ESIntegTestCase {
    public static final AtomicInteger SENT_REQUEST_COUNTER = new AtomicInteger(0);

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "local")
                .build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(CountingSentRequestsPlugin.class);
    }

    @Override
    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
        return pluginList(CountingSentRequestsPlugin.class);
    }

    public void testThatPluginFunctionalityIsLoadedWithoutConfiguration() throws Exception {
        for (Transport transport : internalCluster().getInstances(Transport.class)) {
            assertThat(transport, instanceOf(CountingAssertingLocalTransport.class));
        }

        int countBeforeRequest = SENT_REQUEST_COUNTER.get();
        internalCluster().clientNodeClient().admin().cluster().prepareHealth().get();
        int countAfterRequest = SENT_REQUEST_COUNTER.get();
        assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest)));
    }

    public static class CountingSentRequestsPlugin extends Plugin {
        @Override
        public String name() {
            return "counting-pipelines-plugin";
        }

        @Override
        public String description() {
            return "counting-pipelines-plugin";
        }

        public void onModule(TransportModule transportModule) {
            transportModule.setTransport(CountingAssertingLocalTransport.class, this.name());
        }
    }

    public static final class CountingAssertingLocalTransport extends AssertingLocalTransport {

        @Inject
        public CountingAssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) {
            super(settings, threadPool, version, namedWriteableRegistry);
        }

        @Override
        public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
            SENT_REQUEST_COUNTER.incrementAndGet();
            super.sendRequest(node, requestId, action, request, options);
        }
    }
}
@ -19,8 +19,8 @@

package org.elasticsearch.plugins.responseheader;

import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestModule;

public class TestResponseHeaderPlugin extends Plugin {

@ -34,7 +34,7 @@ public class TestResponseHeaderPlugin extends Plugin {
        return "test-plugin-custom-header-desc";
    }

    public void onModule(RestModule restModule) {
        restModule.addRestAction(TestResponseHeaderRestAction.class);
    public void onModule(NetworkModule module) {
        module.registerRestHandler(TestResponseHeaderRestAction.class);
    }
}
@ -54,7 +54,7 @@ public class FileScriptTests extends ESTestCase {
            .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false).build();
        ScriptService scriptService = makeScriptService(settings);
        Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null);
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders));
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()));
    }

    public void testAllOpsDisabled() throws Exception {
@ -68,7 +68,7 @@ public class FileScriptTests extends ESTestCase {
        Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null);
        for (ScriptContext context : ScriptContext.Standard.values()) {
            try {
                scriptService.compile(script, context, contextAndHeaders);
                scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap());
                fail(context.getKey() + " script should have been rejected");
            } catch(Exception e) {
                assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled"));
@ -36,6 +36,7 @@ import org.elasticsearch.watcher.ResourceWatcherService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@ -62,7 +63,7 @@ public class NativeScriptTests extends ESTestCase {
        ScriptService scriptService = injector.getInstance(ScriptService.class);

        ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null),
                ScriptContext.Standard.SEARCH, contextAndHeaders);
                ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap());
        assertThat(executable.run().toString(), equalTo("test"));
        terminate(injector.getInstance(ThreadPool.class));
    }
@ -88,7 +89,7 @@ public class NativeScriptTests extends ESTestCase {

        for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
            assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext,
                    contextAndHeaders), notNullValue());
                    contextAndHeaders, Collections.emptyMap()), notNullValue());
        }
    }

@ -110,4 +111,4 @@ public class NativeScriptTests extends ESTestCase {
            return "test";
        }
    }
}
}
}
@ -58,7 +58,7 @@ public class ScriptContextTests extends ESTestCase {
        for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
            try {
                Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
                scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders);
                scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders, Collections.emptyMap());
                fail("script compilation should have been rejected");
            } catch (ScriptException e) {
                assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled"));
@ -71,16 +71,16 @@ public class ScriptContextTests extends ESTestCase {
        ScriptService scriptService = makeScriptService();
        Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null);
        try {
            scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders);
            scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders, Collections.emptyMap());
            fail("script compilation should have been rejected");
        } catch (ScriptException e) {
            assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled"));
        }

        // still works for other script contexts
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders));
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders));
        assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders));
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders, Collections.emptyMap()));
        assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()));
        assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders, Collections.emptyMap()));
    }

    public void testUnknownPluginScriptContext() throws Exception {
@ -89,7 +89,7 @@ public class ScriptContextTests extends ESTestCase {
        for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
            try {
                Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
                scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders);
                scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders, Collections.emptyMap());
                fail("script compilation should have been rejected");
            } catch (IllegalArgumentException e) {
                assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported"));
@ -109,7 +109,7 @@ public class ScriptContextTests extends ESTestCase {
        for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
            try {
                Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
                scriptService.compile(script, context, contextAndHeaders);
                scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap());
                fail("script compilation should have been rejected");
            } catch (IllegalArgumentException e) {
                assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported"));
@ -252,7 +252,7 @@ public class ScriptModesTests extends ESTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return null;
        }
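
This is the signature change that runs through the rest of the diff: ScriptEngineService.compile gains a compile-time parameter map. A minimal sketch of a conforming stub implementation, assuming (as the test engines in this commit do) that an engine is free to ignore the map:

    @Override
    public Object compile(String script, Map<String, String> params) {
        // params carries compile-time options; a trivial test engine can ignore it
        return "compiled_" + script;
    }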
@ -132,7 +132,7 @@ public class ScriptServiceTests extends ESTestCase {

        logger.info("--> verify that file with extension was correctly processed");
        CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null),
                ScriptContext.Standard.SEARCH, contextAndHeaders);
                ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap());
        assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));

        logger.info("--> delete both files");
@ -143,7 +143,7 @@ public class ScriptServiceTests extends ESTestCase {
        logger.info("--> verify that file with extension was correctly removed");
        try {
            scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH,
                    contextAndHeaders);
                    contextAndHeaders, Collections.emptyMap());
            fail("the script test_script should no longer exist");
        } catch (IllegalArgumentException ex) {
            assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]"));
@ -154,9 +154,9 @@ public class ScriptServiceTests extends ESTestCase {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
    }

@ -164,9 +164,9 @@ public class ScriptServiceTests extends ESTestCase {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
    }

@ -175,9 +175,9 @@ public class ScriptServiceTests extends ESTestCase {
        buildScriptService(Settings.EMPTY);
        createFileScripts("test");
        CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null),
                randomFrom(scriptContexts), contextAndHeaders);
                randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
    }

@ -338,7 +338,7 @@ public class ScriptServiceTests extends ESTestCase {
        for (String type : scriptEngineService.types()) {
            try {
                scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin(
                        pluginName, unknownContext), contextAndHeaders);
                        pluginName, unknownContext), contextAndHeaders, Collections.emptyMap());
                fail("script compilation should have been rejected");
            } catch(IllegalArgumentException e) {
                assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported"));
@ -349,20 +349,20 @@ public class ScriptServiceTests extends ESTestCase {
    public void testCompileCountedInCompilationStats() throws IOException {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testExecutableCountedInCompilationStats() throws IOException {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testSearchCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
        scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

@ -372,7 +372,7 @@ public class ScriptServiceTests extends ESTestCase {
        int numberOfCompilations = randomIntBetween(1, 1024);
        for (int i = 0; i < numberOfCompilations; i++) {
            scriptService
                    .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
                    .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        }
        assertEquals(numberOfCompilations, scriptService.stats().getCompilations());
    }
@ -382,8 +382,8 @@ public class ScriptServiceTests extends ESTestCase {
        Settings.Builder builder = Settings.builder();
        builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
        buildScriptService(builder.build());
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

@ -391,14 +391,14 @@ public class ScriptServiceTests extends ESTestCase {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        createFileScripts("test");
        scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testIndexedScriptCountedInCompilationStats() throws IOException {
        ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
        buildScriptService(Settings.EMPTY);
        scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

@ -407,8 +407,8 @@ public class ScriptServiceTests extends ESTestCase {
        Settings.Builder builder = Settings.builder();
        builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
        buildScriptService(builder.build());
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
        scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
        assertEquals(2L, scriptService.stats().getCompilations());
        assertEquals(1L, scriptService.stats().getCacheEvictions());
    }
@ -424,7 +424,7 @@ public class ScriptServiceTests extends ESTestCase {
    private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
                                       HasContextAndHeaders contextAndHeaders) {
        try {
            scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders);
            scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap());
            fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]");
        } catch(ScriptException e) {
            //all good
@ -433,7 +433,7 @@ public class ScriptServiceTests extends ESTestCase {

    private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
                                       HasContextAndHeaders contextAndHeaders) {
        assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders), notNullValue());
        assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()), notNullValue());
    }

    public static class TestEngineService implements ScriptEngineService {
@ -454,7 +454,7 @@ public class ScriptServiceTests extends ESTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return "compiled_" + script;
        }
@ -1429,7 +1429,7 @@ public class DateHistogramIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -1555,7 +1555,7 @@ public class DateHistogramIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }
@ -364,7 +364,7 @@ public class AvgIT extends AbstractNumericTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -500,7 +500,7 @@ public class AvgIT extends AbstractNumericTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -585,4 +585,4 @@ public class AvgIT extends AbstractNumericTestCase {
        public void scriptRemoved(CompiledScript script) {
        }
    }
}
}
}
@ -359,7 +359,7 @@ public class SumIT extends AbstractNumericTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -497,7 +497,7 @@ public class SumIT extends AbstractNumericTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -583,4 +583,4 @@ public class SumIT extends AbstractNumericTestCase {
        public void scriptRemoved(CompiledScript script) {
        }
    }
}
}
}
@ -244,7 +244,7 @@ public class ValueCountIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -330,4 +330,4 @@ public class ValueCountIT extends ESIntegTestCase {
        public void scriptRemoved(CompiledScript script) {
        }
    }
}
}
}
@ -0,0 +1,48 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.transport;

import org.elasticsearch.Version;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.elasticsearch.threadpool.ThreadPool;

/** Unit tests for module registering custom transport and transport service */
public class TransportModuleTests extends ModuleTestCase {

    static class FakeTransport extends AssertingLocalTransport {
        @Inject
        public FakeTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) {
            super(settings, threadPool, version, namedWriteableRegistry);
        }
    }

    static class FakeTransportService extends TransportService {
        @Inject
        public FakeTransportService(Settings settings, Transport transport, ThreadPool threadPool) {
            super(settings, transport, threadPool);
        }
    }
}
@ -21,13 +21,14 @@ package org.elasticsearch.transport.netty;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@ -40,7 +41,6 @@ import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.transport.RequestHandlerRegistry;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportRequest;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
@ -66,7 +66,7 @@ public class NettyTransportIT extends ESIntegTestCase {
    protected Settings nodeSettings(int nodeOrdinal) {
        return settingsBuilder().put(super.nodeSettings(nodeOrdinal))
                .put("node.mode", "network")
                .put(TransportModule.TRANSPORT_TYPE_KEY, "exception-throwing").build();
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build();
    }

    @Override
@ -99,8 +99,8 @@ public class NettyTransportIT extends ESIntegTestCase {
        public String description() {
            return "an exception throwing transport for testing";
        }
        public void onModule(TransportModule transportModule) {
            transportModule.addTransport("exception-throwing", ExceptionThrowingNettyTransport.class);
        public void onModule(NetworkModule module) {
            module.registerTransport("exception-throwing", ExceptionThrowingNettyTransport.class);
        }
    }
@ -19,11 +19,12 @@
package org.elasticsearch.transport.netty;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@ -31,7 +32,6 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.junit.annotations.Network;
import org.elasticsearch.transport.TransportModule;

import java.net.InetAddress;
import java.util.Locale;
@ -60,7 +60,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase {
        Settings.Builder builder = settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("network.host", "127.0.0.1")
                .put(TransportModule.TRANSPORT_TYPE_KEY, "netty")
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
                .put("node.mode", "network")
                .put("transport.profiles.client1.port", randomPortRange)
                .put("transport.profiles.client1.publish_host", "127.0.0.7")
@ -72,7 +72,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase {
    public void testThatTransportClientCanConnect() throws Exception {
        Settings settings = settingsBuilder()
                .put("cluster.name", internalCluster().getClusterName())
                .put(TransportModule.TRANSPORT_TYPE_KEY, "netty")
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
                .put("path.home", createTempDir().toString())
                .build();
        try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) {
@ -21,22 +21,19 @@ package org.elasticsearch.transport.netty;

import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.TransportModule;

import java.net.Inet4Address;
import java.net.Inet6Address;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;

/**
 * Checks that Elasticsearch produces a sane publish_address when it binds to
@ -48,7 +45,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase {
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put(TransportModule.TRANSPORT_TYPE_KEY, "netty")
                .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
                .put("node.mode", "network").build();
    }
@ -37,6 +37,7 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NodeConfigurationSource;
@ -47,6 +48,7 @@ import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;

@ -79,6 +81,11 @@ public class TribeIT extends ESIntegTestCase {
                return Settings.builder().put(Node.HTTP_ENABLED, false).build();
            }

            @Override
            public Collection<Class<? extends Plugin>> nodePlugins() {
                return Collections.emptyList();
            }

            @Override
            public Settings transportClientSettings() {
                return null;
@ -86,7 +93,7 @@ public class TribeIT extends ESIntegTestCase {

        };
        cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2,
                Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, true);
                Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList());

        cluster2.beforeTest(getRandom(), 0.1);
        cluster2.ensureAtLeastNumDataNodes(2);
@ -120,7 +120,7 @@ public class UpdateIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return new Object(); // unused
        }

@ -218,7 +218,7 @@ public class UpdateIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return script;
        }

@ -309,7 +309,7 @@ public class UpdateIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return new Object(); // unused
        }

@ -400,7 +400,7 @@ public class UpdateIT extends ESIntegTestCase {
        }

        @Override
        public Object compile(String script) {
        public Object compile(String script, Map<String, String> params) {
            return new Object(); // unused
        }
@ -3,9 +3,7 @@

Named `delimited_payload_filter`. Splits tokens into tokens and payload whenever a delimiter character is found.

Example: "the|1 quick|2 fox|3" is split per default int to tokens `fox`, `quick` and `the` with payloads `1`, `2` and `3` respectively.

Example: "the|1 quick|2 fox|3" is split by default into tokens `the`, `quick`, and `fox` with payloads `1`, `2`, and `3` respectively.

Parameters:
@ -98,7 +98,7 @@ curl -XGET 'localhost:9200/_search?scroll=1m' -d '
{
  "sort": [
    "_doc"
}
  ]
}
'
--------------------------------------------------
@ -33,6 +33,10 @@ dependencyLicenses {
  mapping from: /lucene-.*/, to: 'lucene'
}

// do we or do we not depend on asm-tree, that is the question
// classes are missing, e.g. org.objectweb.asm.tree.LabelNode
thirdPartyAudit.missingClasses = true

compileJava.options.compilerArgs << '-Xlint:-rawtypes'
compileTestJava.options.compilerArgs << '-Xlint:-rawtypes'
@ -95,7 +95,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
    }

    @Override
    public Object compile(String script) {
    public Object compile(String script, Map<String, String> params) {
        // classloader created here
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
@ -33,23 +33,23 @@ public class ExpressionTests extends ESSingleNodeTestCase {

    public void testNeedsScores() {
        IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double");

        ExpressionScriptEngineService service = new ExpressionScriptEngineService(Settings.EMPTY);
        SearchLookup lookup = new SearchLookup(index.mapperService(), index.fieldData(), null);

        Object compiled = service.compile("1.2");
        Object compiled = service.compile("1.2", Collections.emptyMap());
        SearchScript ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.<String, Object>emptyMap());
        assertFalse(ss.needsScores());

        compiled = service.compile("doc['d'].value");
        compiled = service.compile("doc['d'].value", Collections.emptyMap());
        ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.<String, Object>emptyMap());
        assertFalse(ss.needsScores());

        compiled = service.compile("1/_score");
        compiled = service.compile("1/_score", Collections.emptyMap());
        ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.<String, Object>emptyMap());
        assertTrue(ss.needsScores());

        compiled = service.compile("doc['d'].value * _score");
        compiled = service.compile("doc['d'].value * _score", Collections.emptyMap());
        ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "expression", compiled), lookup, Collections.<String, Object>emptyMap());
        assertTrue(ss.needsScores());
    }
@ -35,3 +35,12 @@ integTest {
    systemProperty 'es.script.indexed', 'on'
  }
}

// classes are missing, e.g. jline.console.completer.Completer
thirdPartyAudit.missingClasses = true
thirdPartyAudit.excludes = [
  // uses internal java api: sun.misc.Unsafe
  'groovy.json.internal.FastStringUtils',
  'groovy.json.internal.FastStringUtils$StringImplementation$1',
  'groovy.json.internal.FastStringUtils$StringImplementation$2',
]
@ -165,7 +165,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
    }

    @Override
    public Object compile(String script) {
    public Object compile(String script, Map<String, String> params) {
        try {
            // we reuse classloader, so do a security check just in case.
            SecurityManager sm = System.getSecurityManager();
@ -85,6 +85,7 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
@ -95,16 +96,32 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.Before;

import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;

@ClusterScope(scope = Scope.SUITE, numClientNodes = 1, minNumDataNodes = 2)
public class IndicesRequestTests extends ESIntegTestCase {
@ -127,7 +144,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
    protected Settings nodeSettings(int ordinal) {
        // must set this independently of the plugin so it overrides MockTransportService
        return Settings.builder().put(super.nodeSettings(ordinal))
                .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, "intercepting").build();
                .put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "intercepting").build();
    }

    @Override
@ -756,8 +773,8 @@ public class IndicesRequestTests extends ESIntegTestCase {
        public String description() {
            return "an intercepting transport service for testing";
        }
        public void onModule(TransportModule transportModule) {
            transportModule.addTransportService("intercepting", InterceptingTransportService.class);
        public void onModule(NetworkModule module) {
            module.registerTransportService("intercepting", InterceptingTransportService.class);
        }
    }
@ -99,7 +99,7 @@ public class GroovySecurityTests extends ESTestCase {
        // filtered directly by our classloader
        assertFailure("getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", PrivilegedActionException.class);
        // unfortunately, we have access to other classloaders (due to indy mechanism needing getClassLoader permission)
        // but we can't do much with them directly at least.
        // but we can't do much with them directly at least.
        assertFailure("myobject.getClass().getClassLoader().loadClass(\"java.lang.Runtime\").availableProcessors()", SecurityException.class);
        assertFailure("d = new DateTime(); d.getClass().getDeclaredMethod(\"year\").setAccessible(true)", SecurityException.class);
        assertFailure("d = new DateTime(); d.\"${'get' + 'Class'}\"()." +
@ -133,9 +133,9 @@ public class GroovySecurityTests extends ESTestCase {
        vars.put("myarray", Arrays.asList("foo"));
        vars.put("myobject", new MyObject());

        se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars).run();
        se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script, Collections.emptyMap())), vars).run();
    }

    public static class MyObject {
        public int getPrimitive() { return 0; }
        public Object getObject() { return "value"; }
@ -85,7 +85,7 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc
     * @return a compiled template object for later execution.
     * */
    @Override
    public Object compile(String template) {
    public Object compile(String template, Map<String, String> params) {
        /** Factory to generate Mustache objects from. */
        return (new JsonEscapingMustacheFactory()).compile(new FastStringReader(template), "query-template");
    }
@ -28,6 +28,7 @@ import org.junit.Before;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -52,7 +53,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
                + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}";
        Map<String, Object> vars = new HashMap<>();
        vars.put("boost_val", "0.3");
        BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run();
        BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run();
        assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
                + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}",
                new String(o.toBytes(), Charset.forName("UTF-8")));
@ -63,7 +64,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
        Map<String, Object> vars = new HashMap<>();
        vars.put("boost_val", "0.3");
        vars.put("body_val", "\"quick brown\"");
        BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template)), vars).run();
        BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run();
        assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
                + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}",
                new String(o.toBytes(), Charset.forName("UTF-8")));
@ -22,14 +22,10 @@ package org.elasticsearch.plugin.deletebyquery;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.deletebyquery.DeleteByQueryAction;
import org.elasticsearch.action.deletebyquery.TransportDeleteByQueryAction;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestModule;
import org.elasticsearch.rest.action.deletebyquery.RestDeleteByQueryAction;

import java.util.Collection;
import java.util.Collections;

public class DeleteByQueryPlugin extends Plugin {

    public static final String NAME = "delete-by-query";
@ -48,8 +44,8 @@ public class DeleteByQueryPlugin extends Plugin {
        actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class);
    }

    public void onModule(RestModule restModule) {
        restModule.addRestAction(RestDeleteByQueryAction.class);
    public void onModule(NetworkModule module) {
        module.registerRestHandler(RestDeleteByQueryAction.class);
    }

}
Some files were not shown because too many files have changed in this diff.