Merge branch 'master' into feature/ingest
commit b4b698f653
@@ -288,10 +288,12 @@ class BuildPlugin implements Plugin<Project> {
        project.tasks.withType(JavaCompile) {
            options.fork = true
            options.forkOptions.executable = new File(project.javaHome, 'bin/javac')
            options.forkOptions.memoryMaximumSize = "1g"
            /*
             * -path because gradle will send in paths that don't always exist.
             * -missing because we have tons of missing @returns and @param.
             */
            // don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :)
            options.compilerArgs << '-Werror' << '-Xlint:all,-path' << '-Xdoclint:all' << '-Xdoclint:-missing'
            options.encoding = 'UTF-8'
        }

@@ -0,0 +1,42 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle
+
+import org.gradle.api.GradleException
+import org.gradle.api.tasks.Exec
+
+/**
+ * A wrapper around gradle's Exec task to capture output and log on error.
+ */
+class LoggedExec extends Exec {
+    LoggedExec() {
+        if (logger.isInfoEnabled() == false) {
+            standardOutput = new ByteArrayOutputStream()
+            errorOutput = standardOutput
+            ignoreExitValue = true
+            doLast {
+                if (execResult.exitValue != 0) {
+                    standardOutput.toString('UTF-8').eachLine { line -> logger.error(line) }
+                    throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
+                }
+            }
+        }
+    }
+}

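For context, a minimal sketch of how a plugin might pick up the new task type. The task name and command here are hypothetical illustrations, not part of this commit; LoggedExec simply runs quietly and replays the captured output only when the command fails.

    import org.elasticsearch.gradle.LoggedExec;
    import org.gradle.api.Plugin;
    import org.gradle.api.Project;

    // hypothetical plugin using the new task type
    public class ExamplePlugin implements Plugin<Project> {
        @Override
        public void apply(Project project) {
            LoggedExec task = project.getTasks().create("exampleExec", LoggedExec.class);
            task.commandLine("git", "status"); // any external command works here
        }
    }
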
@@ -20,6 +20,7 @@ package org.elasticsearch.gradle.test

import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.taskdefs.condition.Os
+import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.api.*

@@ -269,7 +270,7 @@ class ClusterFormationTasks {

    /** Adds a task to execute a command to help setup the cluster */
    static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) {
-        return project.tasks.create(name: name, type: Exec, dependsOn: setup) {
+        return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) {
            workingDir node.cwd
            if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                executable 'cmd'

@@ -278,18 +279,6 @@ class ClusterFormationTasks {
                executable 'sh'
            }
            args execArgs
-            // only show output on failure, when not in info or debug mode
-            if (logger.isInfoEnabled() == false) {
-                standardOutput = new ByteArrayOutputStream()
-                errorOutput = standardOutput
-                ignoreExitValue = true
-                doLast {
-                    if (execResult.exitValue != 0) {
-                        logger.error(standardOutput.toString())
-                        throw new GradleException("Process '${execArgs.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
-                    }
-                }
-            }
        }
    }

@@ -467,7 +456,7 @@ class ClusterFormationTasks {

    /** Adds a task to kill an elasticsearch node with the given pidfile */
    static Task configureStopTask(String name, Project project, Object depends, NodeInfo node) {
-        return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
+        return project.tasks.create(name: name, type: LoggedExec, dependsOn: depends) {
            onlyIf { node.pidFile.exists() }
            // the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
            ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"

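The "${ -> ... }" form above is a lazily evaluated GString: the pid file is read when the string is rendered at execution time, not at configuration time. A rough Java analogue of that deferral, with a hypothetical helper class, looks like this:

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.function.Supplier;

    class LazyPidSketch {
        // nothing is read here; the file access happens only when get() is
        // called, mirroring how the GString closure defers the read until
        // the stop task actually runs
        static Supplier<String> lazyPid(Path pidFile) {
            return () -> {
                try {
                    return new String(Files.readAllBytes(pidFile), StandardCharsets.UTF_8).trim();
                } catch (java.io.IOException e) {
                    throw new RuntimeException(e);
                }
            };
        }
    }
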
@@ -66,6 +66,7 @@ final class JNACLibrary {
    }

    static native int getrlimit(int resource, Rlimit rlimit);
+    static native int setrlimit(int resource, Rlimit rlimit);

    static native String strerror(int errno);

@@ -217,4 +217,88 @@ final class JNAKernel32Library {
     * @return true if the function succeeds.
     */
    native boolean CloseHandle(Pointer handle);
+
+    /**
+     * Creates or opens a new job object
+     *
+     * https://msdn.microsoft.com/en-us/library/windows/desktop/ms682409%28v=vs.85%29.aspx
+     *
+     * @param jobAttributes security attributes
+     * @param name job name
+     * @return job handle if the function succeeds
+     */
+    native Pointer CreateJobObjectW(Pointer jobAttributes, String name);
+
+    /**
+     * Associates a process with an existing job
+     *
+     * https://msdn.microsoft.com/en-us/library/windows/desktop/ms681949%28v=vs.85%29.aspx
+     *
+     * @param job job handle
+     * @param process process handle
+     * @return true if the function succeeds
+     */
+    native boolean AssignProcessToJobObject(Pointer job, Pointer process);
+
+    /**
+     * Basic limit information for a job object
+     *
+     * https://msdn.microsoft.com/en-us/library/windows/desktop/ms684147%28v=vs.85%29.aspx
+     */
+    public static class JOBOBJECT_BASIC_LIMIT_INFORMATION extends Structure implements Structure.ByReference {
+        public long PerProcessUserTimeLimit;
+        public long PerJobUserTimeLimit;
+        public int LimitFlags;
+        public SizeT MinimumWorkingSetSize;
+        public SizeT MaximumWorkingSetSize;
+        public int ActiveProcessLimit;
+        public Pointer Affinity;
+        public int PriorityClass;
+        public int SchedulingClass;
+
+        @Override
+        protected List<String> getFieldOrder() {
+            return Arrays.asList(new String[] {
+                    "PerProcessUserTimeLimit", "PerJobUserTimeLimit", "LimitFlags", "MinimumWorkingSetSize",
+                    "MaximumWorkingSetSize", "ActiveProcessLimit", "Affinity", "PriorityClass", "SchedulingClass"
+            });
+        }
+    }
+
+    /**
+     * Constant for JOBOBJECT_BASIC_LIMIT_INFORMATION in Query/Set InformationJobObject
+     */
+    static final int JOBOBJECT_BASIC_LIMIT_INFORMATION_CLASS = 2;
+
+    /**
+     * Constant for LimitFlags, indicating a process limit has been set
+     */
+    static final int JOB_OBJECT_LIMIT_ACTIVE_PROCESS = 8;
+
+    /**
+     * Get job limit and state information
+     *
+     * https://msdn.microsoft.com/en-us/library/windows/desktop/ms684925%28v=vs.85%29.aspx
+     *
+     * @param job job handle
+     * @param infoClass information class constant
+     * @param info pointer to information structure
+     * @param infoLength size of information structure
+     * @param returnLength length of data written back to structure (or null if not wanted)
+     * @return true if the function succeeds
+     */
+    native boolean QueryInformationJobObject(Pointer job, int infoClass, Pointer info, int infoLength, Pointer returnLength);
+
+    /**
+     * Set job limit and state information
+     *
+     * https://msdn.microsoft.com/en-us/library/windows/desktop/ms686216%28v=vs.85%29.aspx
+     *
+     * @param job job handle
+     * @param infoClass information class constant
+     * @param info pointer to information structure
+     * @param infoLength size of information structure
+     * @return true if the function succeeds
+     */
+    native boolean SetInformationJobObject(Pointer job, int infoClass, Pointer info, int infoLength);
}

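These native declarations only work once they are bound to kernel32. A stripped-down sketch of the JNA direct-mapping setup such a class typically uses (the real class's option map, instance handling, and error checking are omitted; this is an assumption about the binding style, not a quote of the file):

    import com.sun.jna.Native;
    import com.sun.jna.Pointer;

    final class Kernel32Sketch {
        static {
            // binds every native method declared in this class against kernel32.dll
            Native.register("kernel32");
        }

        static native boolean CloseHandle(Pointer handle);
    }
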
@@ -47,7 +47,7 @@ import java.util.Map;
 * Installs a limited form of secure computing mode,
 * to filter system calls to block process execution.
 * <p>
- * This is only supported on the Linux, Solaris, and Mac OS X operating systems.
+ * This is supported on Linux, Solaris, FreeBSD, OpenBSD, Mac OS X, and Windows.
 * <p>
 * On Linux it currently supports amd64 and i386 architectures, requires Linux kernel 3.5 or above, and requires
 * {@code CONFIG_SECCOMP} and {@code CONFIG_SECCOMP_FILTER} compiled into the kernel.

@@ -71,6 +71,8 @@ import java.util.Map;
 * <li>{@code PRIV_PROC_EXEC}</li>
 * </ul>
 * <p>
+ * On BSD systems, process creation is restricted with {@code setrlimit(RLIMIT_NPROC)}.
+ * <p>
 * On Mac OS X Leopard or above, a custom {@code sandbox(7)} ("Seatbelt") profile is installed that
 * denies the following rules:
 * <ul>

@@ -78,6 +80,8 @@ import java.util.Map;
 * <li>{@code process-exec}</li>
 * </ul>
 * <p>
+ * On Windows, process creation is restricted with {@code SetInformationJobObject/ActiveProcessLimit}.
+ * <p>
 * This is not intended as a sandbox. It is another level of security, mostly intended to annoy
 * security researchers and make their lives more difficult in achieving "remote execution" exploits.
 * @see <a href="http://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt">

@@ -327,7 +331,8 @@ final class Seccomp {
            case 1: break; // already set by caller
            default:
                int errno = Native.getLastError();
-                if (errno == ENOSYS) {
+                if (errno == EINVAL) {
                    // friendly error, this will be the typical case for an old kernel
                    throw new UnsupportedOperationException("seccomp unavailable: requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in");
                } else {
                    throw new UnsupportedOperationException("prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(errno));

@@ -534,6 +539,73 @@ final class Seccomp {
        logger.debug("Solaris priv_set initialization successful");
    }

+    // BSD implementation via setrlimit(2)
+
+    // TODO: add OpenBSD to Lucene Constants
+    // TODO: JNA doesn't have netbsd support, but this mechanism should work there too.
+    static final boolean OPENBSD = Constants.OS_NAME.startsWith("OpenBSD");
+
+    // not a standard limit, means something different on linux, etc!
+    static final int RLIMIT_NPROC = 7;
+
+    static void bsdImpl() {
+        boolean supported = Constants.FREE_BSD || OPENBSD || Constants.MAC_OS_X;
+        if (supported == false) {
+            throw new IllegalStateException("bug: should not be trying to initialize RLIMIT_NPROC for an unsupported OS");
+        }
+
+        JNACLibrary.Rlimit limit = new JNACLibrary.Rlimit();
+        limit.rlim_cur.setValue(0);
+        limit.rlim_max.setValue(0);
+        if (JNACLibrary.setrlimit(RLIMIT_NPROC, limit) != 0) {
+            throw new UnsupportedOperationException("RLIMIT_NPROC unavailable: " + JNACLibrary.strerror(Native.getLastError()));
+        }
+
+        logger.debug("BSD RLIMIT_NPROC initialization successful");
+    }
+
+    // windows impl via job ActiveProcessLimit
+
+    static void windowsImpl() {
+        if (!Constants.WINDOWS) {
+            throw new IllegalStateException("bug: should not be trying to initialize ActiveProcessLimit for an unsupported OS");
+        }
+
+        JNAKernel32Library lib = JNAKernel32Library.getInstance();
+
+        // create a new Job
+        Pointer job = lib.CreateJobObjectW(null, null);
+        if (job == null) {
+            throw new UnsupportedOperationException("CreateJobObject: " + Native.getLastError());
+        }
+
+        try {
+            // retrieve the current basic limits of the job
+            int clazz = JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION_CLASS;
+            JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION limits = new JNAKernel32Library.JOBOBJECT_BASIC_LIMIT_INFORMATION();
+            limits.write();
+            if (!lib.QueryInformationJobObject(job, clazz, limits.getPointer(), limits.size(), null)) {
+                throw new UnsupportedOperationException("QueryInformationJobObject: " + Native.getLastError());
+            }
+            limits.read();
+            // modify the number of active processes to be 1 (exactly the one process we will add to the job).
+            limits.ActiveProcessLimit = 1;
+            limits.LimitFlags = JNAKernel32Library.JOB_OBJECT_LIMIT_ACTIVE_PROCESS;
+            limits.write();
+            if (!lib.SetInformationJobObject(job, clazz, limits.getPointer(), limits.size())) {
+                throw new UnsupportedOperationException("SetInformationJobObject: " + Native.getLastError());
+            }
+            // assign ourselves to the job
+            if (!lib.AssignProcessToJobObject(job, lib.GetCurrentProcess())) {
+                throw new UnsupportedOperationException("AssignProcessToJobObject: " + Native.getLastError());
+            }
+        } finally {
+            lib.CloseHandle(job);
+        }
+
+        logger.debug("Windows ActiveProcessLimit initialization successful");
+    }
+
    /**
     * Attempt to drop the capability to execute for the process.
     * <p>

@@ -544,11 +616,19 @@
        if (Constants.LINUX) {
            return linuxImpl();
        } else if (Constants.MAC_OS_X) {
+            // try to enable both mechanisms if possible
+            bsdImpl();
            macImpl(tmpFile);
            return 1;
        } else if (Constants.SUN_OS) {
            solarisImpl();
            return 1;
+        } else if (Constants.FREE_BSD || OPENBSD) {
+            bsdImpl();
+            return 1;
+        } else if (Constants.WINDOWS) {
+            windowsImpl();
+            return 1;
        } else {
            throw new UnsupportedOperationException("syscall filtering not supported for OS: '" + Constants.OS_NAME + "'");
        }

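One way to picture the end state of both new mechanisms: once the limit is installed, the JVM itself can no longer spawn children. A hedged sketch of such a check, assuming bsdImpl() or windowsImpl() has already run (this helper is illustrative, not part of the commit):

    final class ProcessLimitCheck {
        static void assertExecDenied() {
            try {
                new ProcessBuilder("echo", "hello").start();
                throw new AssertionError("process creation unexpectedly succeeded");
            } catch (java.io.IOException expected) {
                // expected: RLIMIT_NPROC is 0 (BSD/Mac), or the job's
                // ActiveProcessLimit of 1 is already consumed by this JVM
                // (Windows), so the OS refuses to create the child
            }
        }
    }
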
@@ -38,7 +38,9 @@ import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.env.Environment;

@@ -46,6 +48,7 @@ import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;

@@ -128,7 +131,8 @@ public class TransportClient extends AbstractClient {
            Version version = Version.CURRENT;

            final ThreadPool threadPool = new ThreadPool(settings);

+            final NetworkService networkService = new NetworkService(settings);
+            final SettingsFilter settingsFilter = new SettingsFilter(settings);
            boolean success = false;
            try {
                ModulesBuilder modules = new ModulesBuilder();

@@ -138,8 +142,8 @@ public class TransportClient extends AbstractClient {
                    modules.add(pluginModule);
                }
                modules.add(new PluginsModule(pluginsService));
-                modules.add(new SettingsModule(this.settings));
-                modules.add(new NetworkModule());
+                modules.add(new SettingsModule(this.settings, settingsFilter));
+                modules.add(new NetworkModule(networkService));
                modules.add(new ClusterNameModule(this.settings));
                modules.add(new ThreadPoolModule(threadPool));
                modules.add(new TransportModule(this.settings));

@@ -26,8 +26,14 @@ import org.elasticsearch.common.inject.AbstractModule;
 */
public class NetworkModule extends AbstractModule {

+    private final NetworkService networkService;
+
+    public NetworkModule(NetworkService networkService) {
+        this.networkService = networkService;
+    }
+
    @Override
    protected void configure() {
-        bind(NetworkService.class).asEagerSingleton();
+        bind(NetworkService.class).toInstance(networkService);
    }
}

@@ -20,7 +20,6 @@
package org.elasticsearch.common.network;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;

@@ -80,7 +79,6 @@ public class NetworkService extends AbstractComponent {

    private final List<CustomNameResolver> customNameResolvers = new CopyOnWriteArrayList<>();

-    @Inject
    public NetworkService(Settings settings) {
        super(settings);
        IfConfig.logIfNecessary();

@@ -20,7 +20,6 @@ package org.elasticsearch.common.settings;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.rest.RestRequest;

@@ -35,7 +34,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
/**
 *
 */
-public class SettingsFilter extends AbstractComponent {
+public final class SettingsFilter extends AbstractComponent {
    /**
     * Can be used to specify settings filter that will be used to filter out matching settings in toXContent method
     */

@@ -43,7 +42,6 @@ public class SettingsFilter extends AbstractComponent {

    private final CopyOnWriteArrayList<String> patterns = new CopyOnWriteArrayList<>();

-    @Inject
    public SettingsFilter(Settings settings) {
        super(settings);
    }

@@ -29,14 +29,16 @@ import org.elasticsearch.common.inject.AbstractModule;
public class SettingsModule extends AbstractModule {

    private final Settings settings;
+    private final SettingsFilter settingsFilter;

-    public SettingsModule(Settings settings) {
+    public SettingsModule(Settings settings, SettingsFilter settingsFilter) {
        this.settings = settings;
+        this.settingsFilter = settingsFilter;
    }

    @Override
    protected void configure() {
        bind(Settings.class).toInstance(settings);
-        bind(SettingsFilter.class).asEagerSingleton();
+        bind(SettingsFilter.class).toInstance(settingsFilter);
    }
}

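Taken together with the NetworkModule change above, the wiring pattern is the same everywhere in this commit: construct the collaborators up front, then hand the finished instances to Guice instead of letting Guice build eager singletons. A hedged sketch of a call site (Settings.EMPTY is used here for brevity; real callers pass real settings):

    import org.elasticsearch.common.inject.ModulesBuilder;
    import org.elasticsearch.common.network.NetworkModule;
    import org.elasticsearch.common.network.NetworkService;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.settings.SettingsFilter;
    import org.elasticsearch.common.settings.SettingsModule;

    class WiringSketch {
        static ModulesBuilder wire() {
            Settings settings = Settings.EMPTY;
            // the caller now owns construction; Guice only hands out the instances
            ModulesBuilder modules = new ModulesBuilder();
            modules.add(new SettingsModule(settings, new SettingsFilter(settings)));
            modules.add(new NetworkModule(new NetworkService(settings)));
            return modules;
        }
    }
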
@@ -206,7 +206,7 @@ public class GeoDistanceRangeQuery extends Query {

    @Override
    public String toString(String field) {
-        return "GeoDistanceRangeFilter(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
+        return "GeoDistanceRangeQuery(" + indexFieldData.getFieldNames().indexName() + ", " + geoDistance + ", [" + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")";
    }

    @Override

|
@@ -103,7 +103,7 @@ public class GeoPolygonQuery extends Query {

    @Override
    public String toString(String field) {
-        StringBuilder sb = new StringBuilder("GeoPolygonFilter(");
+        StringBuilder sb = new StringBuilder("GeoPolygonQuery(");
        sb.append(indexFieldData.getFieldNames().indexName());
        sb.append(", ").append(Arrays.toString(points)).append(')');
        return sb.toString();

@@ -47,6 +47,7 @@ public final class BufferingTranslogWriter extends TranslogWriter {
    @Override
    public Translog.Location add(BytesReference data) throws IOException {
        try (ReleasableLock lock = writeLock.acquire()) {
+            ensureOpen();
            operationCounter++;
            final long offset = totalOffset;
            if (data.length() >= buffer.length) {

@@ -106,19 +107,25 @@ public final class BufferingTranslogWriter extends TranslogWriter {
            return;
        }
        synchronized (this) {
-            try (ReleasableLock lock = writeLock.acquire()) {
-                flush();
-                lastSyncedOffset = totalOffset;
+            channelReference.incRef();
+            try {
+                try (ReleasableLock lock = writeLock.acquire()) {
+                    flush();
+                    lastSyncedOffset = totalOffset;
+                }
+                // we can do this outside of the write lock but we have to protect from
+                // concurrent syncs
+                checkpoint(lastSyncedOffset, operationCounter, channelReference);
+            } finally {
+                channelReference.decRef();
+            }
-            // we can do this outside of the write lock but we have to protect from
-            // concurrent syncs
-            checkpoint(lastSyncedOffset, operationCounter, channelReference);
        }
    }

    public void updateBufferSize(int bufferSize) {
        try (ReleasableLock lock = writeLock.acquire()) {
            ensureOpen();
            if (this.buffer.length != bufferSize) {
                flush();
                this.buffer = new byte[bufferSize];

@@ -407,6 +407,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
            out.seek(end);
            final ReleasablePagedBytesReference bytes = out.bytes();
            try (ReleasableLock lock = readLock.acquire()) {
+                ensureOpen();
                Location location = current.add(bytes);
                if (config.isSyncOnEachOperation()) {
                    current.sync();

@@ -414,6 +415,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
                assert current.assertBytesAtLocation(location, bytes);
                return location;
            }
+        } catch (AlreadyClosedException ex) {
+            throw ex;
        } catch (Throwable e) {
            throw new TranslogException(shardId, "Failed to write operation [" + operation + "]", e);
        } finally {

@@ -1285,8 +1288,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC

    @Override
    public void prepareCommit() throws IOException {
-        ensureOpen();
        try (ReleasableLock lock = writeLock.acquire()) {
+            ensureOpen();
            if (currentCommittingTranslog != null) {
                throw new IllegalStateException("already committing a translog with generation: " + currentCommittingTranslog.getGeneration());
            }

@@ -1318,9 +1321,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC

    @Override
    public void commit() throws IOException {
-        ensureOpen();
        ImmutableTranslogReader toClose = null;
        try (ReleasableLock lock = writeLock.acquire()) {
+            ensureOpen();
            if (currentCommittingTranslog == null) {
                prepareCommit();
            }

@@ -123,9 +123,9 @@ public class TranslogWriter extends TranslogReader {
     * add the given bytes to the translog and return the location they were written at
     */
    public Translog.Location add(BytesReference data) throws IOException {
-        ensureOpen();
        final long position;
        try (ReleasableLock lock = writeLock.acquire()) {
+            ensureOpen();
            position = writtenOffset;
            data.writeTo(channel);
            writtenOffset = writtenOffset + data.length();

@@ -200,9 +200,9 @@ public class TranslogWriter extends TranslogReader {
     * returns a new immutable reader which only exposes the current written operation *
     */
    public ImmutableTranslogReader immutableReader() throws TranslogException {
-        ensureOpen();
        if (channelReference.tryIncRef()) {
            try (ReleasableLock lock = writeLock.acquire()) {
+                ensureOpen();
                flush();
                ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, writtenOffset, operationCounter);
                channelReference.incRef(); // for new reader

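All four translog hunks above make the same move: ensureOpen() shifts from before the lock to inside it. The point is that close() takes the same write lock, so checking under the lock removes the window in which a concurrent close could land between the check and the write. A self-contained sketch of the idea, with hypothetical stand-ins for the translog's lock and state:

    class EnsureOpenSketch {
        private final java.util.concurrent.locks.ReentrantLock writeLock =
                new java.util.concurrent.locks.ReentrantLock();
        private volatile boolean closed;

        void add(byte[] data) {
            writeLock.lock();
            try {
                // the ensureOpen() check, now under the lock: close() also
                // takes writeLock, so it cannot sneak in before the write
                if (closed) {
                    throw new IllegalStateException("already closed");
                }
                // ... write data ...
            } finally {
                writeLock.unlock();
            }
        }

        void close() {
            writeLock.lock();
            try {
                closed = true;
            } finally {
                writeLock.unlock();
            }
        }
    }
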
@@ -1,63 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor;
-
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.monitor.fs.FsProbe;
-import org.elasticsearch.monitor.fs.FsService;
-import org.elasticsearch.monitor.jvm.JvmMonitorService;
-import org.elasticsearch.monitor.jvm.JvmService;
-import org.elasticsearch.monitor.os.OsProbe;
-import org.elasticsearch.monitor.os.OsService;
-import org.elasticsearch.monitor.process.ProcessProbe;
-import org.elasticsearch.monitor.process.ProcessService;
-
-/**
- *
- */
-public class MonitorModule extends AbstractModule {
-
-    public static final class MonitorSettings {
-        public static final String MEMORY_MANAGER_TYPE = "monitor.memory.type";
-    }
-
-    private final Settings settings;
-
-    public MonitorModule(Settings settings) {
-        this.settings = settings;
-    }
-
-    @Override
-    protected void configure() {
-        // bind default implementations
-        bind(ProcessProbe.class).toInstance(ProcessProbe.getInstance());
-        bind(OsProbe.class).toInstance(OsProbe.getInstance());
-        bind(FsProbe.class).asEagerSingleton();
-
-        // bind other services
-        bind(ProcessService.class).asEagerSingleton();
-        bind(OsService.class).asEagerSingleton();
-        bind(JvmService.class).asEagerSingleton();
-        bind(FsService.class).asEagerSingleton();
-
-        bind(JvmMonitorService.class).asEagerSingleton();
-    }
-}

@@ -20,13 +20,16 @@
package org.elasticsearch.monitor;

import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.monitor.fs.FsService;
import org.elasticsearch.monitor.jvm.JvmMonitorService;
import org.elasticsearch.monitor.jvm.JvmService;
import org.elasticsearch.monitor.os.OsService;
import org.elasticsearch.monitor.process.ProcessService;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.io.IOException;

/**
 *

@@ -43,16 +46,13 @@ public class MonitorService extends AbstractLifecycleComponent<MonitorService> {

    private final FsService fsService;

-    @Inject
-    public MonitorService(Settings settings, JvmMonitorService jvmMonitorService,
-                          OsService osService, ProcessService processService, JvmService jvmService,
-                          FsService fsService) {
+    public MonitorService(Settings settings, NodeEnvironment nodeEnvironment, ThreadPool threadPool) throws IOException {
        super(settings);
-        this.jvmMonitorService = jvmMonitorService;
-        this.osService = osService;
-        this.processService = processService;
-        this.jvmService = jvmService;
-        this.fsService = fsService;
+        this.jvmMonitorService = new JvmMonitorService(settings, threadPool);
+        this.osService = new OsService(settings);
+        this.processService = new ProcessService(settings);
+        this.jvmService = new JvmService(settings);
+        this.fsService = new FsService(settings, nodeEnvironment);
    }

    public OsService osService() {

@@ -20,7 +20,6 @@
package org.elasticsearch.monitor.fs;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.NodeEnvironment.NodePath;

@@ -31,7 +30,6 @@ public class FsProbe extends AbstractComponent {

    private final NodeEnvironment nodeEnv;

-    @Inject
    public FsProbe(Settings settings, NodeEnvironment nodeEnv) {
        super(settings);
        this.nodeEnv = nodeEnv;

@@ -20,10 +20,10 @@
package org.elasticsearch.monitor.fs;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;
+import org.elasticsearch.env.NodeEnvironment;

import java.io.IOException;

@@ -35,10 +35,9 @@ public class FsService extends AbstractComponent {

    private final SingleObjectCache<FsInfo> fsStatsCache;

-    @Inject
-    public FsService(Settings settings, FsProbe probe) throws IOException {
+    public FsService(Settings settings, NodeEnvironment nodeEnvironment) throws IOException {
        super(settings);
-        this.probe = probe;
+        this.probe = new FsProbe(settings, nodeEnvironment);
        TimeValue refreshInterval = settings.getAsTime("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1));
        fsStatsCache = new FsInfoCache(refreshInterval, probe.stats());
        logger.debug("Using probe [{}] with refresh_interval [{}]", probe, refreshInterval);

@@ -20,7 +20,6 @@
package org.elasticsearch.monitor.jvm;

import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;

@@ -71,7 +70,6 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorServ
        }
    }

-    @Inject
    public JvmMonitorService(Settings settings, ThreadPool threadPool) {
        super(settings);
        this.threadPool = threadPool;

@@ -20,7 +20,6 @@
package org.elasticsearch.monitor.jvm;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

@@ -35,7 +34,6 @@ public class JvmService extends AbstractComponent {

    private JvmStats jvmStats;

-    @Inject
    public JvmService(Settings settings) {
        super(settings);
        this.jvmInfo = JvmInfo.jvmInfo();

@@ -20,7 +20,6 @@
package org.elasticsearch.monitor.os;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;

@@ -37,10 +36,9 @@ public class OsService extends AbstractComponent {

    private SingleObjectCache<OsStats> osStatsCache;

-    @Inject
-    public OsService(Settings settings, OsProbe probe) {
+    public OsService(Settings settings) {
        super(settings);
-        this.probe = probe;
+        this.probe = OsProbe.getInstance();

        TimeValue refreshInterval = settings.getAsTime("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1));

@@ -20,7 +20,6 @@
package org.elasticsearch.monitor.process;

import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;

@@ -34,10 +33,9 @@ public final class ProcessService extends AbstractComponent {
    private final ProcessInfo info;
    private final SingleObjectCache<ProcessStats> processStatsCache;

-    @Inject
-    public ProcessService(Settings settings, ProcessProbe probe) {
+    public ProcessService(Settings settings) {
        super(settings);
-        this.probe = probe;
+        this.probe = ProcessProbe.getInstance();

        final TimeValue refreshInterval = settings.getAsTime("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1));
        processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats());

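The Os/Process changes share one pattern: instead of Guice injecting a probe, the service pulls a process-wide singleton via getInstance(). A hypothetical skeleton of that pattern (not the real probe classes), shown because it recurs in several hunks above:

    final class ProbeSketch {
        private static final ProbeSketch INSTANCE = new ProbeSketch();

        private ProbeSketch() {
            // probes wrap process-wide native/MX-bean state, so a single
            // instance for the whole JVM is enough
        }

        static ProbeSketch getInstance() {
            return INSTANCE;
        }
    }
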
@@ -20,8 +20,10 @@
package org.elasticsearch.node;

import org.elasticsearch.Build;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.bootstrap.Elasticsearch;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClientModule;

@@ -41,7 +43,9 @@ import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryModule;

@@ -66,7 +70,6 @@ import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.indices.ttl.IndicesTTLService;
-import org.elasticsearch.monitor.MonitorModule;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

@@ -155,11 +158,13 @@ public class Node implements Releasable {
        } catch (IOException ex) {
            throw new IllegalStateException("Failed to create node environment", ex);
        }

+        final NetworkService networkService = new NetworkService(settings);
        final NodeSettingsService nodeSettingsService = new NodeSettingsService(settings);
+        final SettingsFilter settingsFilter = new SettingsFilter(settings);
        final ThreadPool threadPool = new ThreadPool(settings);

        boolean success = false;
        try {
+            final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool);
            ModulesBuilder modules = new ModulesBuilder();
            modules.add(new Version.Module(version));
            modules.add(new CircuitBreakerModule(settings));

@@ -168,9 +173,9 @@ public class Node implements Releasable {
                modules.add(pluginModule);
            }
            modules.add(new PluginsModule(pluginsService));
-            modules.add(new SettingsModule(this.settings));
-            modules.add(new NodeModule(this));
-            modules.add(new NetworkModule());
+            modules.add(new SettingsModule(this.settings, settingsFilter));
+            modules.add(new NodeModule(this, nodeSettingsService, monitorService));
+            modules.add(new NetworkModule(networkService));
            modules.add(new ScriptModule(this.settings));
            modules.add(new EnvironmentModule(environment));
            modules.add(new NodeEnvironmentModule(nodeEnvironment));

@@ -186,7 +191,6 @@ public class Node implements Releasable {
            modules.add(new IndicesModule());
            modules.add(new SearchModule());
            modules.add(new ActionModule(false));
-            modules.add(new MonitorModule(settings));
            modules.add(new GatewayModule(settings));
            modules.add(new NodeClientModule());
            modules.add(new PercolatorModule());

@@ -195,7 +199,6 @@ public class Node implements Releasable {
            modules.add(new TribeModule());
            modules.add(new AnalysisModule(environment));

            pluginsService.processModules(modules);

            injector = modules.createInjector();

@@ -203,6 +206,8 @@ public class Node implements Releasable {
            client = injector.getInstance(Client.class);
            threadPool.setNodeSettingsService(injector.getInstance(NodeSettingsService.class));
            success = true;
+        } catch (IOException ex) {
+            throw new ElasticsearchException("failed to bind service", ex);
        } finally {
            if (!success) {
                nodeEnvironment.close();

@@ -22,6 +22,7 @@ package org.elasticsearch.node;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.node.settings.NodeSettingsService;

@@ -32,13 +33,17 @@ import org.elasticsearch.node.settings.NodeSettingsService;
public class NodeModule extends AbstractModule {

    private final Node node;
+    private final NodeSettingsService nodeSettingsService;
+    private final MonitorService monitorService;

    // pkg private so tests can mock
    Class<? extends PageCacheRecycler> pageCacheRecyclerImpl = PageCacheRecycler.class;
    Class<? extends BigArrays> bigArraysImpl = BigArrays.class;

-    public NodeModule(Node node) {
+    public NodeModule(Node node, NodeSettingsService nodeSettingsService, MonitorService monitorService) {
        this.node = node;
+        this.nodeSettingsService = nodeSettingsService;
+        this.monitorService = monitorService;
    }

    @Override

@@ -55,7 +60,8 @@ public class NodeModule extends AbstractModule {
        }

        bind(Node.class).toInstance(node);
-        bind(NodeSettingsService.class).asEagerSingleton();
+        bind(NodeSettingsService.class).toInstance(nodeSettingsService);
+        bind(MonitorService.class).toInstance(monitorService);
        bind(NodeService.class).asEagerSingleton();
    }
}

@@ -20,7 +20,6 @@
package org.elasticsearch.rest.action.admin.cluster.repositories.verify;

import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
-import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;

@@ -30,9 +29,6 @@ import org.elasticsearch.rest.action.support.RestToXContentListener;
import static org.elasticsearch.client.Requests.verifyRepositoryRequest;
import static org.elasticsearch.rest.RestRequest.Method.POST;

-/**
- * Registers repositories
- */
public class RestVerifyRepositoryAction extends BaseRestHandler {

    @Inject

@@ -41,12 +37,11 @@ public class RestVerifyRepositoryAction extends BaseRestHandler {
        controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this);
    }

    @Override
    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
        VerifyRepositoryRequest verifyRepositoryRequest = verifyRepositoryRequest(request.param("repository"));
        verifyRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", verifyRepositoryRequest.masterNodeTimeout()));
        verifyRepositoryRequest.timeout(request.paramAsTime("timeout", verifyRepositoryRequest.timeout()));
-        client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener<VerifyRepositoryResponse>(channel));
+        client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener<>(channel));
    }
}

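The only functional content of this hunk is the Java 7 diamond operator: the type argument is inferred from the target type, so spelling out VerifyRepositoryResponse (and importing it) becomes unnecessary. A minimal illustration:

    import java.util.ArrayList;
    import java.util.List;

    class DiamondSketch {
        List<String> explicit = new ArrayList<String>(); // pre-Java-7 style
        List<String> inferred = new ArrayList<>();       // diamond: same type, inferred
    }
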
@@ -57,6 +57,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.*;

@@ -183,7 +184,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                clientInvocationHandler);
        injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),
-                new SettingsModule(settings),
+                new SettingsModule(settings, new SettingsFilter(settings)),
                new ThreadPoolModule(new ThreadPool(settings)),
                new IndicesModule() {
                    @Override

@@ -640,7 +641,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
            }
            assertThat("different queries should not be equal", secondQuery, not(equalTo(firstQuery)));
-            assertThat("different queries should have different hashcode", secondQuery.hashCode(), not(equalTo(firstQuery.hashCode())));
        }
    }

@@ -896,7 +896,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
                    msg(expected, builder.string()),
                    expected.replaceAll("\\s+",""),
                    builder.string().replaceAll("\\s+",""));
        }
    }

    private static String msg(String left, String right) {
        int size = Math.min(left.length(), right.length());

@@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -95,7 +96,7 @@ public class TemplateQueryParserTests extends ESTestCase {
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
        injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),
-                new SettingsModule(settings),
+                new SettingsModule(settings, new SettingsFilter(settings)),
                new ThreadPoolModule(new ThreadPool(settings)),
                new IndicesModule() {
                    @Override

@@ -460,36 +460,7 @@ public class TranslogTests extends ESTestCase {
        final CountDownLatch downLatch = new CountDownLatch(1);
        for (int i = 0; i < threadCount; i++) {
            final int threadId = i;
-            threads[i] = new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    try {
-                        downLatch.await();
-                        for (int opCount = 0; opCount < opsPerThread; opCount++) {
-                            Translog.Operation op;
-                            switch (randomFrom(Translog.Operation.Type.values())) {
-                                case CREATE:
-                                case INDEX:
-                                    op = new Translog.Index("test", threadId + "_" + opCount,
-                                            randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8"));
-                                    break;
-                                case DELETE:
-                                    op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount),
-                                            1 + randomInt(100000),
-                                            randomFrom(VersionType.values()));
-                                    break;
-                                default:
-                                    throw new ElasticsearchException("not supported op type");
-                            }
-
-                            Translog.Location loc = translog.add(op);
-                            writtenOperations.add(new LocationOperation(op, loc));
-                        }
-                    } catch (Throwable t) {
-                        threadExceptions[threadId] = t;
-                    }
-                }
-            });
+            threads[i] = new TranslogThread(translog, downLatch, opsPerThread, threadId, writtenOperations, threadExceptions);
            threads[i].setDaemon(true);
            threads[i].start();
        }

@@ -1220,4 +1191,92 @@
            assertNull(snapshot.next());
        }
    }
+
+    public void testFailOnClosedWrite() throws IOException {
+        translog.add(new Translog.Index("test", "1", Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+        translog.close();
+        try {
+            translog.add(new Translog.Index("test", "1", Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+            fail("closed");
+        } catch (AlreadyClosedException ex) {
+            // all is well
+        }
+    }
+
+    public void testCloseConcurrently() throws Throwable {
+        final int opsPerThread = randomIntBetween(10, 200);
+        int threadCount = 2 + randomInt(5);
+
+        logger.info("testing with [{}] threads, each doing [{}] ops", threadCount, opsPerThread);
+        final BlockingQueue<LocationOperation> writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread);
+
+        Thread[] threads = new Thread[threadCount];
+        final Throwable[] threadExceptions = new Throwable[threadCount];
+        final CountDownLatch downLatch = new CountDownLatch(1);
+        for (int i = 0; i < threadCount; i++) {
+            final int threadId = i;
+            threads[i] = new TranslogThread(translog, downLatch, opsPerThread, threadId, writtenOperations, threadExceptions);
+            threads[i].setDaemon(true);
+            threads[i].start();
+        }
+
+        downLatch.countDown();
+        translog.close();
+
+        for (int i = 0; i < threadCount; i++) {
+            if (threadExceptions[i] != null) {
+                if ((threadExceptions[i] instanceof AlreadyClosedException) == false) {
+                    throw threadExceptions[i];
+                }
+            }
+            threads[i].join(60 * 1000);
+        }
+    }
+
+    private static class TranslogThread extends Thread {
+        private final CountDownLatch downLatch;
+        private final int opsPerThread;
+        private final int threadId;
+        private final BlockingQueue<LocationOperation> writtenOperations;
+        private final Throwable[] threadExceptions;
+        private final Translog translog;
+
+        public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, BlockingQueue<LocationOperation> writtenOperations, Throwable[] threadExceptions) {
+            this.translog = translog;
+            this.downLatch = downLatch;
+            this.opsPerThread = opsPerThread;
+            this.threadId = threadId;
+            this.writtenOperations = writtenOperations;
+            this.threadExceptions = threadExceptions;
+        }
+
+        @Override
+        public void run() {
+            try {
+                downLatch.await();
+                for (int opCount = 0; opCount < opsPerThread; opCount++) {
+                    Translog.Operation op;
+                    switch (randomFrom(Translog.Operation.Type.values())) {
+                        case CREATE:
+                        case INDEX:
+                            op = new Translog.Index("test", threadId + "_" + opCount,
+                                    randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8"));
+                            break;
+                        case DELETE:
+                            op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount),
+                                    1 + randomInt(100000),
+                                    randomFrom(VersionType.values()));
+                            break;
+                        default:
+                            throw new ElasticsearchException("not supported op type");
+                    }
+
+                    Translog.Location loc = translog.add(op);
+                    writtenOperations.add(new LocationOperation(op, loc));
+                }
+            } catch (Throwable t) {
+                threadExceptions[threadId] = t;
+            }
+        }
+    }
}

@@ -24,6 +24,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -55,7 +56,7 @@ public class NativeScriptTests extends ESTestCase {
        Injector injector = new ModulesBuilder().add(
                new EnvironmentModule(new Environment(settings)),
                new ThreadPoolModule(new ThreadPool(settings)),
-                new SettingsModule(settings),
+                new SettingsModule(settings, new SettingsFilter(settings)),
                scriptModule).createInjector();

        ScriptService scriptService = injector.getInstance(ScriptService.class);

@@ -18,91 +18,18 @@
 */
package org.elasticsearch.search.aggregations.bucket;

-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ESBackcompatTestCase;
+import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;

import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
import java.util.concurrent.ExecutionException;

-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.equalTo;

/**
 */
public class SignificantTermsBackwardCompatibilityIT extends ESBackcompatTestCase {

    static final String INDEX_NAME = "testidx";
    static final String DOC_TYPE = "doc";
    static final String TEXT_FIELD = "text";
    static final String CLASS_FIELD = "class";

-    /**
-     * Test for streaming significant terms buckets to old es versions.
-     */
-    public void testBucketStreaming() throws IOException, ExecutionException, InterruptedException {
-        logger.debug("testBucketStreaming: indexing documents");
-        String type = randomBoolean() ? "string" : "long";
-        String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}";
-        index01Docs(type, settings);
-        ensureGreen();
-        logClusterState();
-        checkSignificantTermsAggregationCorrect();
-        logger.debug("testBucketStreaming: done testing significant terms while upgrading");
-    }
-
-    private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException {
-        String mappings = "{\"doc\": {\"properties\":{\"" + TEXT_FIELD + "\": {\"type\":\"" + type + "\"},\"" + CLASS_FIELD
-                + "\": {\"type\":\"string\"}}}}";
-        assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
-        String[] gb = {"0", "1"};
-        List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
-                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
-                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
-        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
-                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
-        indexRandom(true, indexRequestBuilderList);
-    }
-
-    private void checkSignificantTermsAggregationCorrect() {
-
-        SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
-                .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(
-                        new SignificantTermsBuilder("sig_terms")
-                                .field(TEXT_FIELD)))
-                .execute()
-                .actionGet();
-        assertSearchResponse(response);
-        StringTerms classes = response.getAggregations().get("class");
-        assertThat(classes.getBuckets().size(), equalTo(2));
-        for (Terms.Bucket classBucket : classes.getBuckets()) {
-            Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
-            assertTrue(aggs.containsKey("sig_terms"));
-            SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
-            assertThat(agg.getBuckets().size(), equalTo(1));
-            String term = agg.iterator().next().getKeyAsString();
-            String classTerm = classBucket.getKeyAsString();
-            assertTrue(term.equals(classTerm));
-        }
-    }
-
+    public void testAggregateAndCheckFromSeveralShards() throws IOException, ExecutionException, InterruptedException {
+        SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this);
+    }
}

@@ -23,7 +23,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -39,54 +38,39 @@ import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuil
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams;
import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptWithParams;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
+import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods;
-import org.junit.Test;

import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.*;

/**
 *
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {

    static final String INDEX_NAME = "testidx";
    static final String DOC_TYPE = "doc";
    static final String TEXT_FIELD = "text";
    static final String CLASS_FIELD = "class";

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(CustomSignificanceHeuristicPlugin.class);

@ -99,7 +83,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
|||
public void testPlugin() throws Exception {
|
||||
String type = randomBoolean() ? "string" : "long";
|
||||
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
|
||||
index01Docs(type, settings);
|
||||
SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
|
||||
SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
|
||||
.addAggregation(new TermsBuilder("class")
|
||||
.field(CLASS_FIELD)
|
||||
|
@ -252,7 +236,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
|||
public void testXContentResponse() throws Exception {
|
||||
String type = randomBoolean() ? "string" : "long";
|
||||
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
|
||||
index01Docs(type, settings);
|
||||
SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
|
||||
SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
|
||||
.addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("sig_terms").field(TEXT_FIELD)))
|
||||
.execute()
|
||||
|
@ -327,7 +311,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
|||
public void testBackgroundVsSeparateSet() throws Exception {
|
||||
String type = randomBoolean() ? "string" : "long";
|
||||
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
|
||||
index01Docs(type, settings);
|
||||
SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
|
||||
testBackgroundVsSeparateSet(new MutualInformation.MutualInformationBuilder(true, true), new MutualInformation.MutualInformationBuilder(true, false));
|
||||
testBackgroundVsSeparateSet(new ChiSquare.ChiSquareBuilder(true, true), new ChiSquare.ChiSquareBuilder(true, false));
|
||||
testBackgroundVsSeparateSet(new GND.GNDBuilder(true), new GND.GNDBuilder(false));
|
||||
|
@ -388,28 +372,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
|||
assertThat(score11Background, equalTo(score11SeparateSets));
|
||||
}
|
||||
|
||||
private void index01Docs(String type, String settings) throws ExecutionException, InterruptedException {
|
||||
String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}";
|
||||
assertAcked(prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
|
||||
String[] gb = {"0", "1"};
|
||||
List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
|
||||
.setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
|
||||
.setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
|
||||
.setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
|
||||
.setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
|
||||
.setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
|
||||
.setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
|
||||
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
|
||||
.setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
|
||||
indexRandom(true, false, indexRequestBuilderList);
|
||||
}
|
||||
|
||||
public void testScoresEqualForPositiveAndNegative() throws Exception {
|
||||
indexEqualTestData();
|
||||
testScoresEqualForPositiveAndNegative(new MutualInformation.MutualInformationBuilder(true, true));
|
||||
|
@ -528,4 +490,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
|||
}
|
||||
indexRandom(true, indexRequestBuilderList);
|
||||
}
|
||||
|
||||
public void testReduceFromSeveralShards() throws IOException, ExecutionException, InterruptedException {
|
||||
SharedSignificantTermsTestMethods.aggregateAndCheckFromSeveralShards(this);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.*;

@@ -79,7 +80,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
                .put("path.home", createTempDir())
                .build();
        injector = new ModulesBuilder().add(
                new SettingsModule(settings),
                new SettingsModule(settings, new SettingsFilter(settings)),
                new ThreadPoolModule(new ThreadPool(settings)),
                new IndicesModule() {
                    @Override

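A recurring mechanical change in this merge, first visible here: the SettingsModule constructor gains a second SettingsFilter argument. A minimal sketch of the updated wiring, not part of the diff itself; it assumes a test-case context like the ones in this commit and uses only classes already imported in the hunks above:

    // Build test settings as the affected tests do.
    Settings settings = Settings.builder()
            .put("path.home", createTempDir())
            .build();
    // The filter is now passed in explicitly alongside the settings.
    SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings));
    // Module wiring is otherwise unchanged.
    Injector injector = new ModulesBuilder().add(settingsModule).createInjector();

The ICU, Kuromoji, phonetic, smartcn, and stempel analysis tests further down all make the same one-line substitution at their ModulesBuilder call sites.
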
@@ -0,0 +1,103 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.test.ESIntegTestCase.client;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;

public class SharedSignificantTermsTestMethods {
    public static final String INDEX_NAME = "testidx";
    public static final String DOC_TYPE = "doc";
    public static final String TEXT_FIELD = "text";
    public static final String CLASS_FIELD = "class";

    public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) throws ExecutionException, InterruptedException {
        String type = ESTestCase.randomBoolean() ? "string" : "long";
        String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}";
        index01Docs(type, settings, testCase);
        testCase.ensureGreen();
        testCase.logClusterState();
        checkSignificantTermsAggregationCorrect(testCase);
    }

    private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase testCase) {
        SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
                .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(
                        new SignificantTermsBuilder("sig_terms")
                                .field(TEXT_FIELD)))
                .execute()
                .actionGet();
        assertSearchResponse(response);
        StringTerms classes = response.getAggregations().get("class");
        Assert.assertThat(classes.getBuckets().size(), equalTo(2));
        for (Terms.Bucket classBucket : classes.getBuckets()) {
            Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
            Assert.assertTrue(aggs.containsKey("sig_terms"));
            SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
            Assert.assertThat(agg.getBuckets().size(), equalTo(1));
            SignificantTerms.Bucket sigBucket = agg.iterator().next();
            String term = sigBucket.getKeyAsString();
            String classTerm = classBucket.getKeyAsString();
            Assert.assertTrue(term.equals(classTerm));
        }
    }

    public static void index01Docs(String type, String settings, ESIntegTestCase testCase) throws ExecutionException, InterruptedException {
        String mappings = "{\"doc\": {\"properties\":{\"text\": {\"type\":\"" + type + "\"}}}}";
        assertAcked(testCase.prepareCreate(INDEX_NAME).setSettings(settings).addMapping("doc", mappings));
        String[] gb = {"0", "1"};
        List<IndexRequestBuilder> indexRequestBuilderList = new ArrayList<>();
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1")
                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "2")
                .setSource(TEXT_FIELD, "1", CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "3")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "5")
                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "1"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "6")
                .setSource(TEXT_FIELD, gb, CLASS_FIELD, "0"));
        indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "7")
                .setSource(TEXT_FIELD, "0", CLASS_FIELD, "0"));
        testCase.indexRandom(true, false, indexRequestBuilderList);
    }
}

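A side note on why the shared assertion holds, worked through on the fixture index01Docs builds (doc ids as above): class "1" comprises docs 1, 2, and 5, and the term "1" occurs in all 3 of them versus 4 of the 7 documents index-wide, while "0" occurs in only 1 of the 3 versus 5 of 7; class "0" (docs 3, 4, 6, and 7) is the mirror image, containing "0" in 4 of 4 versus 5 of 7 index-wide. Each class bucket therefore surfaces exactly one significant term, equal to its own class label, which is exactly what checkSignificantTermsAggregationCorrect asserts. The indexRandom(true, false, ...) call matters here too: the second flag disables the test framework's random dummy documents, which would otherwise perturb the background frequencies the significance heuristics compare against.
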
@@ -143,7 +143,9 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
 * MavenFilteringHack or any other copy-style action.
 */
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
  integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
  // These are currently disabled because they are broken.
  // integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
  integTest.enabled = false
  File packagingFiles = new File(buildDir, 'packaging')
  project.ext.packagingFiles = packagingFiles
  task processPackagingFiles(type: Copy) {

@@ -12,7 +12,7 @@ combined unigram+bigram approach.

Bigrams are generated for characters in `han`, `hiragana`, `katakana` and
`hangul`, but bigrams can be disabled for particular scripts with the
`ignore_scripts` parameter. All non-CJK input is passed through unmodified.
`ignored_scripts` parameter. All non-CJK input is passed through unmodified.

[source,js]
--------------------------------------------------

@@ -28,7 +28,7 @@ Bigrams are generated for characters in `han`, `hiragana`, `katakana` and
        "filter" : {
            "han_bigrams_filter" : {
                "type" : "cjk_bigram",
                "ignore_scripts": [
                "ignored_scripts": [
                    "hiragana",
                    "katakana",
                    "hangul"

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -45,7 +46,7 @@ public class AnalysisTestUtils {
                .build();
        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisICUPlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();
        final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, indexSettings));

@@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -203,7 +204,7 @@ public class KuromojiAnalysisTests extends ESTestCase {

        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisKuromojiPlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -57,7 +58,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase {
        Index index = new Index("test");
        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisPhoneticPlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();
        return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings));

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -50,7 +51,7 @@ public class SimpleSmartChineseAnalysisTests extends ESTestCase {
                .build();
        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisSmartChinesePlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();
        final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings));

@@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -55,7 +56,7 @@ public class PolishAnalysisTests extends ESTestCase {

        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisStempelPlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();

@@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;

@@ -99,7 +100,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase {
    private AnalysisService createAnalysisService(Index index, Settings settings) throws IOException {
        AnalysisModule analysisModule = new AnalysisModule(new Environment(settings));
        new AnalysisStempelPlugin().onModule(analysisModule);
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)),
                new EnvironmentModule(new Environment(settings)), analysisModule)
                .createInjector();
        return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings));

@@ -29,6 +29,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;

import java.io.IOException;

@@ -37,6 +38,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.ExecutionException;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")

@@ -47,8 +49,9 @@ public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase {
        return Collections.singleton(MapperMurmur3Plugin.class);
    }

    public void testUpgradeOldMapping() throws IOException {
    public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException {
        final String indexName = "index-mapper-murmur3-2.0.0";
        InternalTestCluster.Async<String> master = internalCluster().startNodeAsync();
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip");

@@ -61,7 +64,7 @@
        Settings settings = Settings.builder()
                .put("path.data", dataPath)
                .build();
        final String node = internalCluster().startNode(settings);
        final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master
        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths();
        assertEquals(1, nodePaths.length);
        dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);

@@ -69,7 +72,10 @@
        Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices");
        Files.move(src, dataPath);

        ensureYellow();
        master.get();
        // force reloading dangling indices with a cluster state republish
        client().admin().cluster().prepareReroute().get();
        ensureGreen(indexName);
        final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get();
        ElasticsearchAssertions.assertHitCount(countResponse, 3L);

@@ -78,5 +84,4 @@
        Cardinality cardinality = cardinalityResponse.getAggregations().get("card");
        assertEquals(3L, cardinality.getValue());
    }

}

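The upgrade tests in this file and the next share a non-obvious ordering; here it is condensed into one place as a sketch assembled from the hunks above (indexName and settings as defined in the tests, nothing new beyond the comments):

    // Start the master asynchronously so the node started next is never master.
    InternalTestCluster.Async<String> master = internalCluster().startNodeAsync();
    // A data-only node sidesteps the dangling-index loading issue hit when the node is master.
    final String node = internalCluster().startDataOnlyNode(settings);
    // ... move the old on-disk index under the node's data path (Files.move above) ...
    master.get(); // make sure the master is up before touching cluster state
    // A reroute republishes the cluster state, which forces the dangling index to be reloaded.
    client().admin().cluster().prepareReroute().get();
    ensureGreen(indexName);

The same sequence appears verbatim in SizeFieldMapperUpgradeTests below.
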
@@ -29,6 +29,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;

import java.io.IOException;

@@ -38,6 +39,7 @@ import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ExecutionException;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")

@@ -48,8 +50,9 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase {
        return Collections.singleton(MapperSizePlugin.class);
    }

    public void testUpgradeOldMapping() throws IOException {
    public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException {
        final String indexName = "index-mapper-size-2.0.0";
        InternalTestCluster.Async<String> master = internalCluster().startNodeAsync();
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip");

@@ -62,15 +65,17 @@
        Settings settings = Settings.builder()
                .put("path.data", dataPath)
                .build();
        final String node = internalCluster().startNode(settings);
        final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master
        Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths();
        assertEquals(1, nodePaths.length);
        dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
        assertFalse(Files.exists(dataPath));
        Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices");
        Files.move(src, dataPath);

        ensureYellow();
        master.get();
        // force reloading dangling indices with a cluster state republish
        client().admin().cluster().prepareReroute().get();
        ensureGreen(indexName);
        final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get();
        ElasticsearchAssertions.assertHitCount(countResponse, 3L);

@@ -19,11 +19,15 @@

package org.elasticsearch.bootstrap;

import org.apache.lucene.util.Constants;
import org.elasticsearch.test.ESTestCase;

/** Simple tests that the seccomp filter is working. */
public class SeccompTests extends ESTestCase {

    /** command to try to run in tests */
    static final String EXECUTABLE = Constants.WINDOWS ? "calc" : "ls";

    @Override
    public void setUp() throws Exception {
        super.setUp();

@@ -44,7 +48,7 @@ public class SeccompTests extends ESTestCase {

    public void testNoExecution() throws Exception {
        try {
            Runtime.getRuntime().exec("ls");
            Runtime.getRuntime().exec(EXECUTABLE);
            fail("should not have been able to execute!");
        } catch (Exception expected) {
            // we can't guarantee how it's converted; currently it's an IOException, like this:

@@ -70,7 +74,7 @@ public class SeccompTests extends ESTestCase {
        @Override
        public void run() {
            try {
                Runtime.getRuntime().exec("ls");
                Runtime.getRuntime().exec(EXECUTABLE);
                fail("should not have been able to execute!");
            } catch (Exception expected) {
                // ok