Merge branch 'master' into feature-suggest-refactoring
Conflicts: docs/reference/migration/migrate_5_0/java.asciidoc
commit 39667b5793
@@ -413,6 +413,7 @@ class BuildPlugin implements Plugin<Project> {
      systemProperty 'jna.nosys', 'true'
      // default test sysprop values
      systemProperty 'tests.ifNoTests', 'fail'
      // TODO: remove setting logging level via system property
      systemProperty 'es.logger.level', 'WARN'
      for (Map.Entry<String, String> property : System.properties.entrySet()) {
        if (property.getKey().startsWith('tests.') ||
@@ -64,7 +64,7 @@ class PrecommitTasks {
        project.forbiddenApis {
            internalRuntimeForbidden = true
            failOnUnsupportedJava = false
            bundledSignatures = ['jdk-unsafe', 'jdk-deprecated']
            bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-system-out']
            signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'),
                getClass().getResource('/forbidden/es-all-signatures.txt')]
            suppressAnnotations = ['**.SuppressForbidden']

@@ -72,7 +72,6 @@ class PrecommitTasks {
        Task mainForbidden = project.tasks.findByName('forbiddenApisMain')
        if (mainForbidden != null) {
            mainForbidden.configure {
                bundledSignatures += 'jdk-system-out'
                signaturesURLs += getClass().getResource('/forbidden/es-core-signatures.txt')
            }
        }
@@ -133,14 +133,15 @@ class NodeInfo {
            'JAVA_HOME' : project.javaHome,
            'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
        ]
        args.add("-Des.node.portsfile=true")
        args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" })
        args.addAll("-E", "es.node.portsfile=true")
        env.put('ES_JAVA_OPTS', config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" "))
        for (Map.Entry<String, String> property : System.properties.entrySet()) {
            if (property.getKey().startsWith('es.')) {
                args.add("-D${property.getKey()}=${property.getValue()}")
                args.add("-E")
                args.add("${property.getKey()}=${property.getValue()}")
            }
        }
        args.add("-Des.path.conf=${confDir}")
        args.addAll("-E", "es.path.conf=${confDir}")
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            args.add('"') // end the entire command, quoted
        }
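Note: after this hunk the test node no longer receives node settings as -Des.* JVM system properties; node settings go through the new -E flag and the test system properties travel via ES_JAVA_OPTS. A purely illustrative launch line under the new scheme (paths and values are placeholders, not taken from this commit):

    ES_JAVA_OPTS="-Dtests.ifNoTests=fail" bin/elasticsearch -E es.node.portsfile=true -E es.path.conf=/path/to/conf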
@@ -258,7 +258,6 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]Bootstrap.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]BootstrapCLIParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JVMCheck.java" checks="LineLength" />

@@ -1587,7 +1586,6 @@
  <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]aws[/\\]blobstore[/\\]MockDefaultS3OutputStream.java" checks="LineLength" />
  <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AbstractS3SnapshotRestoreTest.java" checks="LineLength" />
  <suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
  <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]BootstrapCliParserTests.java" checks="LineLength" />
  <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]ESPolicyUnitTests.java" checks="LineLength" />
  <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]EvilSecurityTests.java" checks="LineLength" />
  <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CheckFileCommandTests.java" checks="LineLength" />
@@ -60,6 +60,8 @@ public class Version {
    public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
    public static final int V_2_2_0_ID = 2020099;
    public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
    public static final int V_2_2_1_ID = 2020199;
    public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
    public static final int V_2_3_0_ID = 2030099;
    public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
    public static final int V_5_0_0_ID = 5000099;

@@ -81,6 +83,8 @@ public class Version {
                return V_5_0_0;
            case V_2_3_0_ID:
                return V_2_3_0;
            case V_2_2_1_ID:
                return V_2_2_1;
            case V_2_2_0_ID:
                return V_2_2_0;
            case V_2_1_2_ID:
@ -19,21 +19,14 @@
|
|||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Locale;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
import org.elasticsearch.common.PidFile;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.inject.CreationException;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
|
@ -47,7 +40,13 @@ import org.elasticsearch.monitor.process.ProcessProbe;
|
|||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.node.internal.InternalSettingsPreparer;
|
||||
|
||||
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
/**
|
||||
* Internal startup code.
|
||||
|
@ -189,9 +188,13 @@ final class Bootstrap {
|
|||
node = new Node(nodeSettings);
|
||||
}
|
||||
|
||||
private static Environment initialSettings(boolean foreground) {
|
||||
private static Environment initialSettings(boolean foreground, String pidFile) {
|
||||
Terminal terminal = foreground ? Terminal.DEFAULT : null;
|
||||
return InternalSettingsPreparer.prepareEnvironment(EMPTY_SETTINGS, terminal);
|
||||
Settings.Builder builder = Settings.builder();
|
||||
if (Strings.hasLength(pidFile)) {
|
||||
builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile);
|
||||
}
|
||||
return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal);
|
||||
}
|
||||
|
||||
private void start() {
|
||||
|
@ -218,22 +221,18 @@ final class Bootstrap {
|
|||
* This method is invoked by {@link Elasticsearch#main(String[])}
|
||||
* to startup elasticsearch.
|
||||
*/
|
||||
static void init(String[] args) throws Throwable {
|
||||
static void init(
|
||||
final boolean foreground,
|
||||
final String pidFile,
|
||||
final Map<String, String> esSettings) throws Throwable {
|
||||
// Set the system property before anything has a chance to trigger its use
|
||||
initLoggerPrefix();
|
||||
|
||||
BootstrapCliParser parser = new BootstrapCliParser();
|
||||
int status = parser.main(args, Terminal.DEFAULT);
|
||||
|
||||
if (parser.shouldRun() == false || status != ExitCodes.OK) {
|
||||
exit(status);
|
||||
}
|
||||
elasticsearchSettings(esSettings);
|
||||
|
||||
INSTANCE = new Bootstrap();
|
||||
|
||||
boolean foreground = !"false".equals(System.getProperty("es.foreground", System.getProperty("es-foreground")));
|
||||
|
||||
Environment environment = initialSettings(foreground);
|
||||
Environment environment = initialSettings(foreground, pidFile);
|
||||
Settings settings = environment.settings();
|
||||
LogConfigurator.configure(settings, true);
|
||||
checkForCustomConfFile();
|
||||
|
@ -297,6 +296,13 @@ final class Bootstrap {
|
|||
}
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
|
||||
private static void elasticsearchSettings(Map<String, String> esSettings) {
|
||||
for (Map.Entry<String, String> esSetting : esSettings.entrySet()) {
|
||||
System.setProperty(esSetting.getKey(), esSetting.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "System#out")
|
||||
private static void closeSystOut() {
|
||||
System.out.close();
|
||||
|
|
|
@ -1,95 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import joptsimple.OptionSet;
|
||||
import joptsimple.OptionSpec;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.cli.Command;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.UserError;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
|
||||
final class BootstrapCliParser extends Command {
|
||||
|
||||
private final OptionSpec<Void> versionOption;
|
||||
private final OptionSpec<Void> daemonizeOption;
|
||||
private final OptionSpec<String> pidfileOption;
|
||||
private final OptionSpec<String> propertyOption;
|
||||
private boolean shouldRun = false;
|
||||
|
||||
BootstrapCliParser() {
|
||||
super("Starts elasticsearch");
|
||||
// TODO: in jopt-simple 5.0, make this mutually exclusive with all other options
|
||||
versionOption = parser.acceptsAll(Arrays.asList("V", "version"),
|
||||
"Prints elasticsearch version information and exits");
|
||||
daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"),
|
||||
"Starts Elasticsearch in the background");
|
||||
// TODO: in jopt-simple 5.0 this option type can be a Path
|
||||
pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"),
|
||||
"Creates a pid file in the specified path on start")
|
||||
.withRequiredArg();
|
||||
propertyOption = parser.accepts("D", "Configures an Elasticsearch setting")
|
||||
.withRequiredArg();
|
||||
}
|
||||
|
||||
// TODO: don't use system properties as a way to do this, its horrible...
|
||||
@SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
|
||||
@Override
|
||||
protected void execute(Terminal terminal, OptionSet options) throws Exception {
|
||||
if (options.has(versionOption)) {
|
||||
terminal.println("Version: " + org.elasticsearch.Version.CURRENT
|
||||
+ ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
|
||||
+ ", JVM: " + JvmInfo.jvmInfo().version());
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: don't use sysprops for any of these! pass the args through to bootstrap...
|
||||
if (options.has(daemonizeOption)) {
|
||||
System.setProperty("es.foreground", "false");
|
||||
}
|
||||
String pidFile = pidfileOption.value(options);
|
||||
if (Strings.isNullOrEmpty(pidFile) == false) {
|
||||
System.setProperty("es.pidfile", pidFile);
|
||||
}
|
||||
|
||||
for (String property : propertyOption.values(options)) {
|
||||
String[] keyValue = property.split("=", 2);
|
||||
if (keyValue.length != 2) {
|
||||
throw new UserError(ExitCodes.USAGE, "Malformed elasticsearch setting, must be of the form key=value");
|
||||
}
|
||||
String key = keyValue[0];
|
||||
if (key.startsWith("es.") == false) {
|
||||
key = "es." + key;
|
||||
}
|
||||
System.setProperty(key, keyValue[1]);
|
||||
}
|
||||
shouldRun = true;
|
||||
}
|
||||
|
||||
boolean shouldRun() {
|
||||
return shouldRun;
|
||||
}
|
||||
}
|
|
@ -19,23 +19,94 @@
|
|||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import joptsimple.OptionSet;
|
||||
import joptsimple.OptionSpec;
|
||||
import joptsimple.util.KeyValuePair;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.cli.Command;
|
||||
import org.elasticsearch.cli.ExitCodes;
|
||||
import org.elasticsearch.cli.Terminal;
|
||||
import org.elasticsearch.cli.UserError;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This class starts elasticsearch.
|
||||
*/
|
||||
public final class Elasticsearch {
|
||||
class Elasticsearch extends Command {
|
||||
|
||||
/** no instantiation */
|
||||
private Elasticsearch() {}
|
||||
private final OptionSpec<Void> versionOption;
|
||||
private final OptionSpec<Void> daemonizeOption;
|
||||
private final OptionSpec<String> pidfileOption;
|
||||
private final OptionSpec<KeyValuePair> propertyOption;
|
||||
|
||||
// visible for testing
|
||||
Elasticsearch() {
|
||||
super("starts elasticsearch");
|
||||
// TODO: in jopt-simple 5.0, make this mutually exclusive with all other options
|
||||
versionOption = parser.acceptsAll(Arrays.asList("V", "version"),
|
||||
"Prints elasticsearch version information and exits");
|
||||
daemonizeOption = parser.acceptsAll(Arrays.asList("d", "daemonize"),
|
||||
"Starts Elasticsearch in the background");
|
||||
// TODO: in jopt-simple 5.0 this option type can be a Path
|
||||
pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"),
|
||||
"Creates a pid file in the specified path on start")
|
||||
.withRequiredArg();
|
||||
propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Main entry point for starting elasticsearch
|
||||
*/
|
||||
public static void main(String[] args) throws Exception {
|
||||
public static void main(final String[] args) throws Exception {
|
||||
final Elasticsearch elasticsearch = new Elasticsearch();
|
||||
int status = main(args, elasticsearch, Terminal.DEFAULT);
|
||||
if (status != ExitCodes.OK) {
|
||||
exit(status);
|
||||
}
|
||||
}
|
||||
|
||||
static int main(final String[] args, final Elasticsearch elasticsearch, final Terminal terminal) throws Exception {
|
||||
return elasticsearch.main(args, terminal);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void execute(Terminal terminal, OptionSet options) throws Exception {
|
||||
if (options.has(versionOption)) {
|
||||
if (options.has(daemonizeOption) || options.has(pidfileOption)) {
|
||||
throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option");
|
||||
}
|
||||
terminal.println("Version: " + org.elasticsearch.Version.CURRENT
|
||||
+ ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
|
||||
+ ", JVM: " + JvmInfo.jvmInfo().version());
|
||||
return;
|
||||
}
|
||||
|
||||
final boolean daemonize = options.has(daemonizeOption);
|
||||
final String pidFile = pidfileOption.value(options);
|
||||
|
||||
final Map<String, String> esSettings = new HashMap<>();
|
||||
for (final KeyValuePair kvp : propertyOption.values(options)) {
|
||||
if (!kvp.key.startsWith("es.")) {
|
||||
throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]");
|
||||
}
|
||||
if (kvp.value.isEmpty()) {
|
||||
throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty");
|
||||
}
|
||||
esSettings.put(kvp.key, kvp.value);
|
||||
}
|
||||
|
||||
init(daemonize, pidFile, esSettings);
|
||||
}
|
||||
|
||||
void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) {
|
||||
try {
|
||||
Bootstrap.init(args);
|
||||
} catch (Throwable t) {
|
||||
Bootstrap.init(!daemonize, pidFile, esSettings);
|
||||
} catch (final Throwable t) {
|
||||
// format exceptions to the console in a special way
|
||||
// to avoid 2MB stacktraces from guice, etc.
|
||||
throw new StartupError(t);
|
||||
|
|
|
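Note: with the rewritten command-line handling above, settings reach the node through the -E option (parsed as jopt-simple KeyValuePair values), -V/--version is mutually exclusive with the other options, and each -E key must carry the es. prefix with a non-empty value or the command fails with a usage error. A purely illustrative invocation (paths are placeholders, not from this commit):

    bin/elasticsearch --daemonize --pidfile /tmp/elasticsearch.pid -E es.path.conf=/etc/elasticsearch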
@@ -110,9 +110,7 @@ public class LogConfigurator {
        if (resolveConfig) {
            resolveConfig(environment, settingsBuilder);
        }
        settingsBuilder
                .putProperties("elasticsearch.", BootstrapInfo.getSystemProperties())
                .putProperties("es.", BootstrapInfo.getSystemProperties());
        settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties());
        // add custom settings after config was added so that they are not overwritten by config
        settingsBuilder.put(settings);
        settingsBuilder.replacePropertyPlaceholders();
@@ -1136,10 +1136,10 @@ public final class Settings implements ToXContent {
         * @param properties The properties to put
         * @return The builder
         */
        public Builder putProperties(String prefix, Dictionary<Object,Object> properties) {
            for (Object key1 : Collections.list(properties.keys())) {
                String key = Objects.toString(key1);
                String value = Objects.toString(properties.get(key));
        public Builder putProperties(String prefix, Dictionary<Object, Object> properties) {
            for (Object property : Collections.list(properties.keys())) {
                String key = Objects.toString(property);
                String value = Objects.toString(properties.get(property));
                if (key.startsWith(prefix)) {
                    map.put(key.substring(prefix.length()), value);
                }

@@ -1154,19 +1154,12 @@ public final class Settings implements ToXContent {
         * @param properties The properties to put
         * @return The builder
         */
        public Builder putProperties(String prefix, Dictionary<Object,Object> properties, String[] ignorePrefixes) {
            for (Object key1 : Collections.list(properties.keys())) {
                String key = Objects.toString(key1);
                String value = Objects.toString(properties.get(key));
        public Builder putProperties(String prefix, Dictionary<Object, Object> properties, String ignorePrefix) {
            for (Object property : Collections.list(properties.keys())) {
                String key = Objects.toString(property);
                String value = Objects.toString(properties.get(property));
                if (key.startsWith(prefix)) {
                    boolean ignore = false;
                    for (String ignorePrefix : ignorePrefixes) {
                        if (key.startsWith(ignorePrefix)) {
                            ignore = true;
                            break;
                        }
                    }
                    if (!ignore) {
                    if (!key.startsWith(ignorePrefix)) {
                        map.put(key.substring(prefix.length()), value);
                    }
                }
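Note: the hunk above narrows putProperties from an array of ignore prefixes to a single ignorePrefix. As a standalone illustration of that filtering rule, written against plain JDK types only (the class and property names below are invented for the example, this is not the Elasticsearch Builder itself):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    public class PrefixFilterSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("es.cluster.name", "test");
            props.setProperty("es.default.path.home", "/tmp/home"); // skipped by the ignore prefix
            props.setProperty("java.version", "1.8");               // no "es." prefix, dropped

            Map<String, String> map = new HashMap<>();
            String prefix = "es.";
            String ignorePrefix = "es.default.";
            for (String key : props.stringPropertyNames()) {
                // keep keys under the prefix, minus the ignored namespace, with the prefix stripped
                if (key.startsWith(prefix) && !key.startsWith(ignorePrefix)) {
                    map.put(key.substring(prefix.length()), props.getProperty(key));
                }
            }
            System.out.println(map); // {cluster.name=test}
        }
    }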
@@ -743,7 +743,9 @@ public class InternalEngine extends Engine {
            indexWriter.forceMerge(maxNumSegments, true /* blocks and waits for merges*/);
        }
        if (flush) {
            flush(true, true);
            if (tryRenewSyncCommit() == false) {
                flush(false, true);
            }
        }
        if (upgrade) {
            logger.info("finished segment upgrade");
@@ -154,8 +154,6 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
        for (MappedFieldType fieldType : this) {
            if (Regex.simpleMatch(pattern, fieldType.name())) {
                fields.add(fieldType.name());
            } else if (Regex.simpleMatch(pattern, fieldType.name())) {
                fields.add(fieldType.name());
            }
        }
        return fields;
@@ -42,7 +42,7 @@ public class ConstantScoreQueryParser implements QueryParser<ConstantScoreQueryB
    public ConstantScoreQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();

        QueryBuilder query = null;
        QueryBuilder<?> query = null;
        boolean queryFound = false;
        String queryName = null;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;

@@ -56,6 +56,10 @@ public class ConstantScoreQueryParser implements QueryParser<ConstantScoreQueryB
                // skip
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
                    if (queryFound) {
                        throw new ParsingException(parser.getTokenLocation(), "[" + ConstantScoreQueryBuilder.NAME + "]"
                                + " accepts only one 'filter' element.");
                    }
                    query = parseContext.parseInnerQueryBuilder();
                    queryFound = true;
                } else {

@@ -69,6 +73,8 @@ public class ConstantScoreQueryParser implements QueryParser<ConstantScoreQueryB
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[constant_score] query does not support [" + currentFieldName + "]");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "]");
            }
        }
        if (!queryFound) {
@@ -624,12 +624,17 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
        assert shardId.getIndex().equals(indexSettings.getIndex());
        final IndexService indexService = indexService(shardId.getIndex());
        if (indexSettings.isOnSharedFilesystem() == false) {
            if (indexService != null && nodeEnv.hasNodeFile()) {
                return indexService.hasShard(shardId.id()) == false;
            } else if (nodeEnv.hasNodeFile()) {
                if (indexSettings.hasCustomDataPath()) {
            if (nodeEnv.hasNodeFile()) {
                final boolean isAllocated = indexService != null && indexService.hasShard(shardId.id());
                if (isAllocated) {
                    return false; // we are allocated - can't delete the shard
                } else if (indexSettings.hasCustomDataPath()) {
                    // lets see if it's on a custom path (return false if the shared doesn't exist)
                    // we don't need to delete anything that is not there
                    return Files.exists(nodeEnv.resolveCustomLocation(indexSettings, shardId));
                } else {
                    // lets see if it's path is available (return false if the shared doesn't exist)
                    // we don't need to delete anything that is not there
                    return FileSystemUtils.exists(nodeEnv.availableShardPaths(shardId));
                }
            }
@@ -53,8 +53,8 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class InternalSettingsPreparer {

    private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"};
    static final String[] PROPERTY_PREFIXES = {"es.", "elasticsearch."};
    static final String[] PROPERTY_DEFAULTS_PREFIXES = {"es.default.", "elasticsearch.default."};
    static final String PROPERTY_PREFIX = "es.";
    static final String PROPERTY_DEFAULTS_PREFIX = "es.default.";

    public static final String SECRET_PROMPT_VALUE = "${prompt.secret}";
    public static final String TEXT_PROMPT_VALUE = "${prompt.text}";

@@ -126,13 +126,9 @@ public class InternalSettingsPreparer {
        output.put(input);
        if (useSystemProperties(input)) {
            if (loadDefaults) {
                for (String prefix : PROPERTY_DEFAULTS_PREFIXES) {
                    output.putProperties(prefix, BootstrapInfo.getSystemProperties());
                }
            }
            for (String prefix : PROPERTY_PREFIXES) {
                output.putProperties(prefix, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIXES);
                output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties());
            }
            output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX);
        }
        output.replacePropertyPlaceholders();
    }
@ -53,9 +53,9 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
|
||||
private String unmappedType;
|
||||
|
||||
private String sortMode;
|
||||
private SortMode sortMode;
|
||||
|
||||
private QueryBuilder nestedFilter;
|
||||
private QueryBuilder<?> nestedFilter;
|
||||
|
||||
private String nestedPath;
|
||||
|
||||
|
@ -65,7 +65,9 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
this.order(template.order());
|
||||
this.missing(template.missing());
|
||||
this.unmappedType(template.unmappedType());
|
||||
this.sortMode(template.sortMode());
|
||||
if (template.sortMode != null) {
|
||||
this.sortMode(template.sortMode());
|
||||
}
|
||||
this.setNestedFilter(template.getNestedFilter());
|
||||
this.setNestedPath(template.getNestedPath());
|
||||
}
|
||||
|
@ -134,12 +136,12 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
* Defines what values to pick in the case a document contains multiple
|
||||
* values for the targeted sort field. Possible values: min, max, sum and
|
||||
* avg
|
||||
*
|
||||
* TODO would love to see an enum here
|
||||
*
|
||||
* <p>
|
||||
* The last two values are only applicable for number based fields.
|
||||
*/
|
||||
public FieldSortBuilder sortMode(String sortMode) {
|
||||
public FieldSortBuilder sortMode(SortMode sortMode) {
|
||||
Objects.requireNonNull(sortMode, "sort mode cannot be null");
|
||||
this.sortMode = sortMode;
|
||||
return this;
|
||||
}
|
||||
|
@ -148,14 +150,14 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
* Returns what values to pick in the case a document contains multiple
|
||||
* values for the targeted sort field.
|
||||
*/
|
||||
public String sortMode() {
|
||||
public SortMode sortMode() {
|
||||
return this.sortMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the nested filter that the nested objects should match with in order
|
||||
* to be taken into account for sorting.
|
||||
*
|
||||
*
|
||||
* TODO should the above getters and setters be deprecated/ changed in
|
||||
* favour of real getters and setters?
|
||||
*/
|
||||
|
@ -263,7 +265,10 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
|
||||
out.writeOptionalString(this.sortMode);
|
||||
out.writeBoolean(this.sortMode != null);
|
||||
if (this.sortMode != null) {
|
||||
this.sortMode.writeTo(out);
|
||||
}
|
||||
out.writeOptionalString(this.unmappedType);
|
||||
}
|
||||
|
||||
|
@ -272,7 +277,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
String fieldName = in.readString();
|
||||
FieldSortBuilder result = new FieldSortBuilder(fieldName);
|
||||
if (in.readBoolean()) {
|
||||
QueryBuilder query = in.readQuery();
|
||||
QueryBuilder<?> query = in.readQuery();
|
||||
result.setNestedFilter(query);
|
||||
}
|
||||
result.setNestedPath(in.readOptionalString());
|
||||
|
@ -281,7 +286,9 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
if (in.readBoolean()) {
|
||||
result.order(SortOrder.readOrderFrom(in));
|
||||
}
|
||||
result.sortMode(in.readOptionalString());
|
||||
if (in.readBoolean()) {
|
||||
result.sortMode(SortMode.PROTOTYPE.readFrom(in));
|
||||
}
|
||||
result.unmappedType(in.readOptionalString());
|
||||
return result;
|
||||
}
|
||||
|
@ -290,11 +297,11 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
public FieldSortBuilder fromXContent(QueryParseContext context, String fieldName) throws IOException {
|
||||
XContentParser parser = context.parser();
|
||||
|
||||
QueryBuilder nestedFilter = null;
|
||||
QueryBuilder<?> nestedFilter = null;
|
||||
String nestedPath = null;
|
||||
Object missing = null;
|
||||
SortOrder order = null;
|
||||
String sortMode = null;
|
||||
SortMode sortMode = null;
|
||||
String unmappedType = null;
|
||||
|
||||
String currentFieldName = null;
|
||||
|
@ -328,7 +335,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
|
|||
throw new IllegalStateException("Sort order " + sortOrder + " not supported.");
|
||||
}
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_MODE)) {
|
||||
sortMode = parser.text();
|
||||
sortMode = SortMode.fromString(parser.text());
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, UNMAPPED_TYPE)) {
|
||||
unmappedType = parser.text();
|
||||
} else {
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -55,8 +54,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
private GeoDistance geoDistance = GeoDistance.DEFAULT;
|
||||
private DistanceUnit unit = DistanceUnit.DEFAULT;
|
||||
|
||||
// TODO there is an enum that covers that parameter which we should be using here
|
||||
private String sortMode = null;
|
||||
private SortMode sortMode = null;
|
||||
@SuppressWarnings("rawtypes")
|
||||
private QueryBuilder nestedFilter;
|
||||
private String nestedPath;
|
||||
|
@ -204,9 +202,9 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
* Defines which distance to use for sorting in the case a document contains multiple geo points.
|
||||
* Possible values: min and max
|
||||
*/
|
||||
public GeoDistanceSortBuilder sortMode(String sortMode) {
|
||||
MultiValueMode temp = MultiValueMode.fromString(sortMode);
|
||||
if (temp == MultiValueMode.SUM) {
|
||||
public GeoDistanceSortBuilder sortMode(SortMode sortMode) {
|
||||
Objects.requireNonNull(sortMode, "sort mode cannot be null");
|
||||
if (sortMode == SortMode.SUM) {
|
||||
throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance");
|
||||
}
|
||||
this.sortMode = sortMode;
|
||||
|
@ -214,7 +212,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
}
|
||||
|
||||
/** Returns which distance to use for sorting in the case a document contains multiple geo points. */
|
||||
public String sortMode() {
|
||||
public SortMode sortMode() {
|
||||
return this.sortMode;
|
||||
}
|
||||
|
||||
|
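Note: a short usage sketch of the sortMode change above, using only signatures visible in this diff (the field name and coordinates are placeholders):

    GeoDistanceSortBuilder sort = new GeoDistanceSortBuilder("location", 40.7, -74.0);
    sort.sortMode(SortMode.MIN);      // accepted
    // sort.sortMode(SortMode.SUM);   // would throw IllegalArgumentException per the check above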
@ -345,7 +343,10 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
geoDistance.writeTo(out);
|
||||
unit.writeTo(out);
|
||||
order.writeTo(out);
|
||||
out.writeOptionalString(sortMode);
|
||||
out.writeBoolean(this.sortMode != null);
|
||||
if (this.sortMode != null) {
|
||||
sortMode.writeTo(out);
|
||||
}
|
||||
if (nestedFilter != null) {
|
||||
out.writeBoolean(true);
|
||||
out.writeQuery(nestedFilter);
|
||||
|
@ -367,9 +368,8 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
result.geoDistance(GeoDistance.readGeoDistanceFrom(in));
|
||||
result.unit(DistanceUnit.readDistanceUnit(in));
|
||||
result.order(SortOrder.readOrderFrom(in));
|
||||
String sortMode = in.readOptionalString();
|
||||
if (sortMode != null) {
|
||||
result.sortMode(sortMode);
|
||||
if (in.readBoolean()) {
|
||||
result.sortMode = SortMode.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
result.setNestedFilter(in.readQuery());
|
||||
|
@ -388,7 +388,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
DistanceUnit unit = DistanceUnit.DEFAULT;
|
||||
GeoDistance geoDistance = GeoDistance.DEFAULT;
|
||||
SortOrder order = SortOrder.ASC;
|
||||
MultiValueMode sortMode = null;
|
||||
SortMode sortMode = null;
|
||||
QueryBuilder<?> nestedFilter = null;
|
||||
String nestedPath = null;
|
||||
|
||||
|
@ -437,7 +437,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
ignoreMalformed = ignore_malformed_value;
|
||||
}
|
||||
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
|
||||
sortMode = MultiValueMode.fromString(parser.text());
|
||||
sortMode = SortMode.fromString(parser.text());
|
||||
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
|
||||
nestedPath = parser.text();
|
||||
} else {
|
||||
|
@ -454,7 +454,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
result.unit(unit);
|
||||
result.order(order);
|
||||
if (sortMode != null) {
|
||||
result.sortMode(sortMode.name());
|
||||
result.sortMode(sortMode);
|
||||
}
|
||||
result.setNestedFilter(nestedFilter);
|
||||
result.setNestedPath(nestedPath);
|
||||
|
|
|
@@ -111,8 +111,11 @@ public class GeoDistanceSortParser implements SortParser {
                if (coerce == true) {
                    ignoreMalformed = true;
                }
            } else if ("ignore_malformed".equals(currentName) && coerce == false) {
                ignoreMalformed = parser.booleanValue();
            } else if ("ignore_malformed".equals(currentName)) {
                boolean ignoreMalformedFlag = parser.booleanValue();
                if (coerce == false) {
                    ignoreMalformed = ignoreMalformedFlag;
                }
            } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
                sortMode = MultiValueMode.fromString(parser.text());
            } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
@@ -34,8 +34,7 @@ import java.util.Objects;
/**
 * A sort builder allowing to sort by score.
 */
public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements SortBuilderParser<ScoreSortBuilder>,
        SortElementParserTemp<ScoreSortBuilder> {
public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements SortBuilderParser<ScoreSortBuilder> {

    private static final String NAME = "_score";
    static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder();
@ -19,24 +19,48 @@
|
|||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Script.ScriptField;
|
||||
import org.elasticsearch.script.ScriptParameterParser;
|
||||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Script sort builder allows to sort based on a custom script expression.
|
||||
*/
|
||||
public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
|
||||
public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements SortBuilderParser<ScriptSortBuilder> {
|
||||
|
||||
private Script script;
|
||||
private static final String NAME = "_script";
|
||||
static final ScriptSortBuilder PROTOTYPE = new ScriptSortBuilder(new Script("_na_"), ScriptSortType.STRING);
|
||||
public static final ParseField TYPE_FIELD = new ParseField("type");
|
||||
public static final ParseField SCRIPT_FIELD = new ParseField("script");
|
||||
public static final ParseField SORTMODE_FIELD = new ParseField("mode");
|
||||
public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
|
||||
public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
|
||||
public static final ParseField PARAMS_FIELD = new ParseField("params");
|
||||
|
||||
private final String type;
|
||||
private final Script script;
|
||||
|
||||
private String sortMode;
|
||||
private ScriptSortType type;
|
||||
|
||||
private QueryBuilder nestedFilter;
|
||||
private SortMode sortMode;
|
||||
|
||||
private QueryBuilder<?> nestedFilter;
|
||||
|
||||
private String nestedPath;
|
||||
|
||||
|
@ -45,21 +69,57 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
|
|||
*
|
||||
* @param script
|
||||
* The script to use.
|
||||
* @param type
|
||||
* The type of the script, can be either {@link ScriptSortType#STRING} or
|
||||
* {@link ScriptSortType#NUMBER}
|
||||
*/
|
||||
public ScriptSortBuilder(Script script, String type) {
|
||||
public ScriptSortBuilder(Script script, ScriptSortType type) {
|
||||
Objects.requireNonNull(script, "script cannot be null");
|
||||
Objects.requireNonNull(type, "type cannot be null");
|
||||
this.script = script;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
ScriptSortBuilder(ScriptSortBuilder original) {
|
||||
this.script = original.script;
|
||||
this.type = original.type;
|
||||
this.order = original.order;
|
||||
this.sortMode = original.sortMode;
|
||||
this.nestedFilter = original.nestedFilter;
|
||||
this.nestedPath = original.nestedPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the script used in this sort.
|
||||
*/
|
||||
public Script script() {
|
||||
return this.script;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the type used in this sort.
|
||||
*/
|
||||
public ScriptSortType type() {
|
||||
return this.type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines which distance to use for sorting in the case a document contains multiple geo points.
|
||||
* Possible values: min and max
|
||||
*/
|
||||
public ScriptSortBuilder sortMode(String sortMode) {
|
||||
public ScriptSortBuilder sortMode(SortMode sortMode) {
|
||||
Objects.requireNonNull(sortMode, "sort mode cannot be null.");
|
||||
this.sortMode = sortMode;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the sort mode.
|
||||
*/
|
||||
public SortMode sortMode() {
|
||||
return this.sortMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the nested filter that the nested objects should match with in order to be taken into account
|
||||
* for sorting.
|
||||
|
@ -69,6 +129,13 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the nested filter.
|
||||
*/
|
||||
public QueryBuilder<?> getNestedFilter() {
|
||||
return this.nestedFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the nested path if sorting occurs on a field that is inside a nested object. For sorting by script this
|
||||
* needs to be specified.
|
||||
|
@ -78,22 +145,200 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the nested path.
|
||||
*/
|
||||
public String getNestedPath() {
|
||||
return this.nestedPath;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
|
||||
builder.startObject("_script");
|
||||
builder.field("script", script);
|
||||
builder.field("type", type);
|
||||
builder.startObject(NAME);
|
||||
builder.field(SCRIPT_FIELD.getPreferredName(), script);
|
||||
builder.field(TYPE_FIELD.getPreferredName(), type);
|
||||
builder.field(ORDER_FIELD.getPreferredName(), order);
|
||||
if (sortMode != null) {
|
||||
builder.field("mode", sortMode);
|
||||
builder.field(SORTMODE_FIELD.getPreferredName(), sortMode);
|
||||
}
|
||||
if (nestedPath != null) {
|
||||
builder.field("nested_path", nestedPath);
|
||||
builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
|
||||
}
|
||||
if (nestedFilter != null) {
|
||||
builder.field("nested_filter", nestedFilter, builderParams);
|
||||
builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, builderParams);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScriptSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
XContentParser parser = context.parser();
|
||||
ParseFieldMatcher parseField = context.parseFieldMatcher();
|
||||
Script script = null;
|
||||
ScriptSortType type = null;
|
||||
SortMode sortMode = null;
|
||||
SortOrder order = null;
|
||||
QueryBuilder<?> nestedFilter = null;
|
||||
String nestedPath = null;
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentName = parser.currentName();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseField.match(currentName, ScriptField.SCRIPT)) {
|
||||
script = Script.parse(parser, parseField);
|
||||
} else if (parseField.match(currentName, PARAMS_FIELD)) {
|
||||
params = parser.map();
|
||||
} else if (parseField.match(currentName, NESTED_FILTER_FIELD)) {
|
||||
nestedFilter = context.parseInnerQueryBuilder();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseField.match(currentName, ORDER_FIELD)) {
|
||||
order = SortOrder.fromString(parser.text());
|
||||
} else if (scriptParameterParser.token(currentName, token, parser, parseField)) {
|
||||
// Do Nothing (handled by ScriptParameterParser
|
||||
} else if (parseField.match(currentName, TYPE_FIELD)) {
|
||||
type = ScriptSortType.fromString(parser.text());
|
||||
} else if (parseField.match(currentName, SORTMODE_FIELD)) {
|
||||
sortMode = SortMode.fromString(parser.text());
|
||||
} else if (parseField.match(currentName, NESTED_PATH_FIELD)) {
|
||||
nestedPath = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] failed to parse field [" + currentName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unexpected token [" + token + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (script == null) { // Didn't find anything using the new API so try using the old one instead
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
if (params == null) {
|
||||
params = new HashMap<>();
|
||||
}
|
||||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
|
||||
}
|
||||
}
|
||||
|
||||
ScriptSortBuilder result = new ScriptSortBuilder(script, type);
|
||||
if (order != null) {
|
||||
result.order(order);
|
||||
}
|
||||
if (sortMode != null) {
|
||||
result.sortMode(sortMode);
|
||||
}
|
||||
if (nestedFilter != null) {
|
||||
result.setNestedFilter(nestedFilter);
|
||||
}
|
||||
if (nestedPath != null) {
|
||||
result.setNestedPath(nestedPath);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
if (this == object) {
|
||||
return true;
|
||||
}
|
||||
if (object == null || getClass() != object.getClass()) {
|
||||
return false;
|
||||
}
|
||||
ScriptSortBuilder other = (ScriptSortBuilder) object;
|
||||
return Objects.equals(script, other.script) &&
|
||||
Objects.equals(type, other.type) &&
|
||||
Objects.equals(order, other.order) &&
|
||||
Objects.equals(sortMode, other.sortMode) &&
|
||||
Objects.equals(nestedFilter, other.nestedFilter) &&
|
||||
Objects.equals(nestedPath, other.nestedPath);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(script, type, order, sortMode, nestedFilter, nestedPath);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
script.writeTo(out);
|
||||
type.writeTo(out);
|
||||
order.writeTo(out);
|
||||
out.writeBoolean(sortMode != null);
|
||||
if (sortMode != null) {
|
||||
sortMode.writeTo(out);
|
||||
}
|
||||
out.writeOptionalString(nestedPath);
|
||||
boolean hasNestedFilter = nestedFilter != null;
|
||||
out.writeBoolean(hasNestedFilter);
|
||||
if (hasNestedFilter) {
|
||||
out.writeQuery(nestedFilter);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScriptSortBuilder readFrom(StreamInput in) throws IOException {
|
||||
ScriptSortBuilder builder = new ScriptSortBuilder(Script.readScript(in), ScriptSortType.PROTOTYPE.readFrom(in));
|
||||
builder.order(SortOrder.readOrderFrom(in));
|
||||
if (in.readBoolean()) {
|
||||
builder.sortMode(SortMode.PROTOTYPE.readFrom(in));
|
||||
}
|
||||
builder.nestedPath = in.readOptionalString();
|
||||
if (in.readBoolean()) {
|
||||
builder.nestedFilter = in.readQuery();
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
public enum ScriptSortType implements Writeable<ScriptSortType> {
|
||||
/** script sort for a string value **/
|
||||
STRING,
|
||||
/** script sort for a numeric value **/
|
||||
NUMBER;
|
||||
|
||||
static ScriptSortType PROTOTYPE = STRING;
|
||||
|
||||
@Override
|
||||
public void writeTo(final StreamOutput out) throws IOException {
|
||||
out.writeVInt(ordinal());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScriptSortType readFrom(final StreamInput in) throws IOException {
|
||||
int ordinal = in.readVInt();
|
||||
if (ordinal < 0 || ordinal >= values().length) {
|
||||
throw new IOException("Unknown ScriptSortType ordinal [" + ordinal + "]");
|
||||
}
|
||||
return values()[ordinal];
|
||||
}
|
||||
|
||||
public static ScriptSortType fromString(final String str) {
|
||||
Objects.requireNonNull(str, "input string is null");
|
||||
switch (str.toLowerCase(Locale.ROOT)) {
|
||||
case ("string"):
|
||||
return ScriptSortType.STRING;
|
||||
case ("number"):
|
||||
return ScriptSortType.NUMBER;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown ScriptSortType [" + str + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,8 +48,7 @@ import org.elasticsearch.script.ScriptParameterParser;
|
|||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
import org.elasticsearch.search.SearchParseException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
@ -61,9 +60,6 @@ import java.util.Map;
|
|||
*/
|
||||
public class ScriptSortParser implements SortParser {
|
||||
|
||||
private static final String STRING_SORT_TYPE = "string";
|
||||
private static final String NUMBER_SORT_TYPE = "number";
|
||||
|
||||
@Override
|
||||
public String[] names() {
|
||||
return new String[]{"_script"};
|
||||
|
@ -73,7 +69,7 @@ public class ScriptSortParser implements SortParser {
|
|||
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception {
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
Script script = null;
|
||||
String type = null;
|
||||
ScriptSortType type = null;
|
||||
Map<String, Object> params = null;
|
||||
boolean reverse = false;
|
||||
MultiValueMode sortMode = null;
|
||||
|
@ -103,7 +99,7 @@ public class ScriptSortParser implements SortParser {
|
|||
} else if (scriptParameterParser.token(currentName, token, parser, context.parseFieldMatcher())) {
|
||||
// Do Nothing (handled by ScriptParameterParser
|
||||
} else if ("type".equals(currentName)) {
|
||||
type = parser.text();
|
||||
type = ScriptSortType.fromString(parser.text());
|
||||
} else if ("mode".equals(currentName)) {
|
||||
sortMode = MultiValueMode.fromString(parser.text());
|
||||
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
|
||||
|
@ -136,7 +132,7 @@ public class ScriptSortParser implements SortParser {
|
|||
final SearchScript searchScript = context.getScriptService().search(
|
||||
context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||
|
||||
if (STRING_SORT_TYPE.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
|
||||
if (ScriptSortType.STRING.equals(type) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "type [string] doesn't support mode [" + sortMode + "]");
|
||||
}
|
||||
|
||||
|
@ -162,7 +158,7 @@ public class ScriptSortParser implements SortParser {
|
|||
|
||||
final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
|
||||
switch (type) {
|
||||
case STRING_SORT_TYPE:
|
||||
case STRING:
|
||||
fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, sortMode, nested) {
|
||||
LeafSearchScript leafScript;
|
||||
@Override
|
||||
|
@ -185,7 +181,7 @@ public class ScriptSortParser implements SortParser {
|
|||
}
|
||||
};
|
||||
break;
|
||||
case NUMBER_SORT_TYPE:
|
||||
case NUMBER:
|
||||
// TODO: should we rather sort missing values last?
|
||||
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, sortMode, nested) {
|
||||
LeafSearchScript leafScript;
|
||||
|
|
|
@@ -21,8 +21,7 @@ package org.elasticsearch.search.sort;

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.script.Script;

import java.util.Arrays;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;

/**
 * A set of static factory methods for {@link SortBuilder}s.

@@ -53,7 +52,7 @@ public class SortBuilders {
     * @param script The script to use.
     * @param type The type, can either be "string" or "number".
     */
    public static ScriptSortBuilder scriptSort(Script script, String type) {
    public static ScriptSortBuilder scriptSort(Script script, ScriptSortType type) {
        return new ScriptSortBuilder(script, type);
    }

@@ -63,12 +62,12 @@ public class SortBuilders {
     * @param fieldName The geo point like field name.
     * @param lat Latitude of the point to create the range distance facets from.
     * @param lon Longitude of the point to create the range distance facets from.
     *
     *
     */
    public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, double lat, double lon) {
        return new GeoDistanceSortBuilder(fieldName, lat, lon);
    }


    /**
     * Constructs a new distance based sort on a geo point like field.
     *

@@ -87,5 +86,5 @@ public class SortBuilders {
     */
    public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, String ... geohashes) {
        return new GeoDistanceSortBuilder(fieldName, geohashes);
    }
}
}
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Elasticsearch supports sorting by array or multi-valued fields. The SortMode option controls what array value is picked
|
||||
* for sorting the document it belongs to. The mode option can have the following values:
|
||||
* <ul>
|
||||
* <li>min - Pick the lowest value.</li>
|
||||
* <li>max - Pick the highest value.</li>
|
||||
* <li>sum - Use the sum of all values as sort value. Only applicable for number based array fields.</li>
|
||||
* <li>avg - Use the average of all values as sort value. Only applicable for number based array fields.</li>
|
||||
* <li>median - Use the median of all values as sort value. Only applicable for number based array fields.</li>
|
||||
* </ul>
|
||||
*/
|
||||
public enum SortMode implements Writeable<SortMode> {
|
||||
/** pick the lowest value **/
|
||||
MIN,
|
||||
/** pick the highest value **/
|
||||
MAX,
|
||||
/** Use the sum of all values as sort value. Only applicable for number based array fields. **/
|
||||
SUM,
|
||||
/** Use the average of all values as sort value. Only applicable for number based array fields. **/
|
||||
AVG,
|
||||
/** Use the median of all values as sort value. Only applicable for number based array fields. **/
|
||||
MEDIAN;
|
||||
|
||||
static SortMode PROTOTYPE = MIN;
|
||||
|
||||
@Override
|
||||
public void writeTo(final StreamOutput out) throws IOException {
|
||||
out.writeVInt(ordinal());
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortMode readFrom(final StreamInput in) throws IOException {
|
||||
int ordinal = in.readVInt();
|
||||
if (ordinal < 0 || ordinal >= values().length) {
|
||||
throw new IOException("Unknown SortMode ordinal [" + ordinal + "]");
|
||||
}
|
||||
return values()[ordinal];
|
||||
}
|
||||
|
||||
public static SortMode fromString(final String str) {
|
||||
Objects.requireNonNull(str, "input string is null");
|
||||
switch (str.toLowerCase(Locale.ROOT)) {
|
||||
case ("min"):
|
||||
return MIN;
|
||||
case ("max"):
|
||||
return MAX;
|
||||
case ("sum"):
|
||||
return SUM;
|
||||
case ("avg"):
|
||||
return AVG;
|
||||
case ("median"):
|
||||
return MEDIAN;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown SortMode [" + str + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name().toLowerCase(Locale.ROOT);
|
||||
}
|
||||
}
|
|
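Note: a minimal usage sketch of the new SortMode enum above, based only on the methods shown in this diff (the assertions are illustrative):

    SortMode mode = SortMode.fromString("avg"); // input is lower-cased with Locale.ROOT before matching
    assert mode == SortMode.AVG;
    assert "avg".equals(mode.toString());
    // "median" parses to SortMode.MEDIAN; any other value throws IllegalArgumentException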
@@ -72,9 +72,6 @@ grant {
  // set by ESTestCase to improve test reproducibility
  // TODO: set this with gradle or some other way that repros with seed?
  permission java.util.PropertyPermission "es.processors.override", "write";
  // set by CLIToolTestCase
  // TODO: do this differently? or test commandline tools differently?
  permission java.util.PropertyPermission "es.default.path.home", "write";

  // TODO: these simply trigger a noisy warning if its unable to clear the properties
  // fix that in randomizedtesting
@@ -260,4 +260,20 @@ public class VersionTests extends ESTestCase {
        }
    }

    // this test ensures we never bump the lucene version in a bugfix release
    public void testLuceneVersionIsSameOnMinorRelease() {
        for (Version version : VersionUtils.allVersions()) {
            for (Version other : VersionUtils.allVersions()) {
                if (other.onOrAfter(version)) {
                    assertTrue("lucene versions must be " + other + " >= " + version,
                            other.luceneVersion.onOrAfter(version.luceneVersion));
                }
                if (other.major == version.major && other.minor == version.minor) {
                    assertEquals(other.luceneVersion.major, version.luceneVersion.major);
                    assertEquals(other.luceneVersion.minor, version.luceneVersion.minor);
                    // should we also assert the lucene bugfix version?
                }
            }
        }
    }
}
@@ -0,0 +1,191 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bootstrap;

import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;

public class ElasticsearchCliTests extends ESTestCase {

    public void testVersion() throws Exception {
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--daemonize");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-p", "/tmp/pid");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "--pidfile", "/tmp/pid");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-d");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--daemonize");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "-p", "/tmp/pid");
        runTestThatVersionIsMutuallyExclusiveToOtherOptions("--version", "--pidfile", "/tmp/pid");
        runTestThatVersionIsReturned("-V");
        runTestThatVersionIsReturned("--version");
    }

    private void runTestThatVersionIsMutuallyExclusiveToOtherOptions(String... args) throws Exception {
        runTestVersion(
                ExitCodes.USAGE,
                output -> assertThat(
                        output,
                        containsString("ERROR: Elasticsearch version option is mutually exclusive with any other option")),
                args);
    }

    private void runTestThatVersionIsReturned(String... args) throws Exception {
        runTestVersion(ExitCodes.OK, output -> {
            assertThat(output, containsString("Version: " + Version.CURRENT.toString()));
            assertThat(output, containsString("Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()));
            assertThat(output, containsString("JVM: " + JvmInfo.jvmInfo().version()));
        }, args);
    }

    private void runTestVersion(int expectedStatus, Consumer<String> outputConsumer, String... args) throws Exception {
        runTest(expectedStatus, false, outputConsumer, (foreground, pidFile, esSettings) -> {}, args);
    }

    public void testThatPidFileCanBeConfigured() throws Exception {
        runPidFileTest(ExitCodes.USAGE, false, output -> assertThat(output, containsString("Option p/pidfile requires an argument")), "-p");
        runPidFileTest(ExitCodes.OK, true, output -> {}, "-p", "/tmp/pid");
        runPidFileTest(ExitCodes.OK, true, output -> {}, "--pidfile", "/tmp/pid");
    }

    private void runPidFileTest(final int expectedStatus, final boolean expectedInit, Consumer<String> outputConsumer, final String... args)
            throws Exception {
        runTest(
                expectedStatus,
                expectedInit,
                outputConsumer,
                (foreground, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")),
                args);
    }

    public void testThatParsingDaemonizeWorks() throws Exception {
        runDaemonizeTest(true, "-d");
        runDaemonizeTest(true, "--daemonize");
        runDaemonizeTest(false);
    }

    private void runDaemonizeTest(final boolean expectedDaemonize, final String... args) throws Exception {
        runTest(
                ExitCodes.OK,
                true,
                output -> {},
                (foreground, pidFile, esSettings) -> assertThat(foreground, equalTo(!expectedDaemonize)),
                args);
    }

    public void testElasticsearchSettings() throws Exception {
        runTest(
                ExitCodes.OK,
                true,
                output -> {},
                (foreground, pidFile, esSettings) -> {
                    assertThat(esSettings.size(), equalTo(2));
                    assertThat(esSettings, hasEntry("es.foo", "bar"));
                    assertThat(esSettings, hasEntry("es.baz", "qux"));
                },
                "-Ees.foo=bar", "-E", "es.baz=qux"
        );
    }

    public void testElasticsearchSettingPrefix() throws Exception {
        runElasticsearchSettingPrefixTest("-E", "foo");
        runElasticsearchSettingPrefixTest("-E", "foo=bar");
        runElasticsearchSettingPrefixTest("-E", "=bar");
    }

    private void runElasticsearchSettingPrefixTest(String... args) throws Exception {
        runTest(
                ExitCodes.USAGE,
                false,
                output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")),
                (foreground, pidFile, esSettings) -> {},
                args
        );
    }

    public void testElasticsearchSettingCanNotBeEmpty() throws Exception {
        runTest(
                ExitCodes.USAGE,
                false,
                output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")),
                (foreground, pidFile, esSettings) -> {},
                "-E", "es.foo="
        );
    }

    public void testUnknownOption() throws Exception {
        runTest(
                ExitCodes.USAGE,
                false,
                output -> assertThat(output, containsString("network.host is not a recognized option")),
                (foreground, pidFile, esSettings) -> {},
                "--network.host");
    }

    private interface InitConsumer {
        void accept(final boolean foreground, final String pidFile, final Map<String, String> esSettings);
    }

    private void runTest(
            final int expectedStatus,
            final boolean expectedInit,
            final Consumer<String> outputConsumer,
            final InitConsumer initConsumer,
            String... args) throws Exception {
        final MockTerminal terminal = new MockTerminal();
        try {
            final AtomicBoolean init = new AtomicBoolean();
            final int status = Elasticsearch.main(args, new Elasticsearch() {
                @Override
                void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) {
                    init.set(true);
                    initConsumer.accept(!daemonize, pidFile, esSettings);
                }
            }, terminal);
            assertThat(status, equalTo(expectedStatus));
            assertThat(init.get(), equalTo(expectedInit));
            outputConsumer.accept(terminal.getOutput());
        } catch (Throwable t) {
            // if an unexpected exception is thrown, we log
            // terminal output to aid debugging
            logger.info(terminal.getOutput());
            // rethrow so the test fails
            throw t;
        }
    }

}
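For context, an illustrative sketch only (it simply mirrors the runTest harness above rather than documenting any new API), a caller exercising the parser directly would look roughly like:

    // expected outcome given these args: daemonize == true, pidFile == "/tmp/pid", esSettings == {es.foo=bar}
    String[] args = new String[] {"-d", "-p", "/tmp/pid", "-E", "es.foo=bar"};
    int status = Elasticsearch.main(args, new Elasticsearch() {
        @Override
        void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) {
            // assertions on the parsed values would go here
        }
    }, new MockTerminal());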
@@ -166,7 +166,7 @@ public class XContentBuilderTests extends ESTestCase {

        byte[] data = bos.bytes().toBytes();
        String sData = new String(data, "UTF8");
        System.out.println("DATA: " + sData);
        assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}"));
    }

    public void testFieldCaseConversion() throws Exception {
@@ -181,7 +181,7 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
            e.getCause().printStackTrace();
            logger.info("Caught exception", e.getCause());
            e.getChannel().close();
        }
    }
@@ -485,7 +485,7 @@ public class InternalEngineTests extends ESTestCase {

            if (flush) {
                // we should have had just 1 merge, so last generation should be exact
                assertEquals(gen2 + 1, store.readLastCommittedSegmentsInfo().getLastGeneration());
                assertEquals(gen2, store.readLastCommittedSegmentsInfo().getLastGeneration());
            }
        }
    }

@@ -843,7 +843,7 @@ public class InternalEngineTests extends ESTestCase {
                Engine.SyncedFlushResult.SUCCESS);
        assertEquals(3, engine.segments(false).size());

        engine.forceMerge(false, 1, false, false, false);
        engine.forceMerge(forceMergeFlushes, 1, false, false, false);
        if (forceMergeFlushes == false) {
            engine.refresh("make all segments visible");
            assertEquals(4, engine.segments(false).size());

@@ -867,7 +867,7 @@ public class InternalEngineTests extends ESTestCase {
            assertEquals(engine.getLastWriteNanos(), delete.startTime());
        }
        assertFalse(engine.tryRenewSyncCommit());
        engine.flush();
        engine.flush(false, true); // we might hit a concurrent flush from a finishing merge here - just wait if ongoing...
        assertNull(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID));
        assertNull(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
    }
@@ -54,7 +54,7 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
     * test that missing "filter" element causes {@link ParsingException}
     */
    public void testFilterElement() throws IOException {
        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {}";
        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");

@@ -63,6 +63,38 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
        }
    }

    /**
     * test that multiple "filter" elements causes {@link ParsingException}
     */
    public void testMultipleFilterElements() throws IOException {
        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {\n" +
                "\"filter\" : { \"term\": { \"foo\": \"a\" } },\n" +
                "\"filter\" : { \"term\": { \"foo\": \"x\" } },\n" +
                "} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("accepts only one 'filter' element"));
        }
    }

    /**
     * test that "filter" does not accept an array of queries, throws {@link ParsingException}
     */
    public void testNoArrayAsFilterElements() throws IOException {
        String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {\n" +
                "\"filter\" : [ { \"term\": { \"foo\": \"a\" } },\n" +
                "{ \"term\": { \"foo\": \"x\" } } ]\n" +
                "} }";
        try {
            parseQuery(queryString);
            fail("Expected ParsingException");
        } catch (ParsingException e) {
            assertThat(e.getMessage(), containsString("unexpected token [START_ARRAY]"));
        }
    }

    public void testIllegalArguments() {
        try {
            new ConstantScoreQueryBuilder(null);

@@ -79,16 +111,16 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta

    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"constant_score\" : {\n" +
                "    \"filter\" : {\n" +
                "      \"terms\" : {\n" +
                "        \"user\" : [ \"kimchy\", \"elasticsearch\" ],\n" +
                "        \"boost\" : 42.0\n" +
                "      }\n" +
                "    },\n" +
                "  \"boost\" : 23.0\n" +
                "  }\n" +
                "{\n" +
                "  \"constant_score\" : {\n" +
                "    \"filter\" : {\n" +
                "      \"terms\" : {\n" +
                "        \"user\" : [ \"kimchy\", \"elasticsearch\" ],\n" +
                "        \"boost\" : 42.0\n" +
                "      }\n" +
                "    },\n" +
                "  \"boost\" : 23.0\n" +
                "  }\n" +
                "}";

        ConstantScoreQueryBuilder parsed = (ConstantScoreQueryBuilder) parseQuery(json);
@@ -1491,7 +1491,7 @@ public class TranslogTests extends ESTestCase {
        if (writtenOperations.size() != snapshot.totalOperations()) {
            for (int i = 0; i < threadCount; i++) {
                if (threadExceptions[i] != null) {
                    threadExceptions[i].printStackTrace();
                    logger.info("Translog exception", threadExceptions[i]);
                }
            }
        }
@@ -281,7 +281,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
        controller.assertNotThrottled(shard0);
        controller.assertThrottled(shard1);

        System.out.println("TEST: now index more");
        logger.info("--> Indexing more data");

        // More indexing to shard0
        controller.simulateIndexing(shard0);
@@ -81,6 +81,8 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
        assertFalse("shard is allocated", indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
        test.removeShard(0, "boom");
        assertTrue("shard is removed", indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
        ShardId notAllocated = new ShardId(test.index(), 100);
        assertFalse("shard that was never on this node should NOT be deletable", indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings()));
    }

    public void testDeleteIndexStore() throws Exception {
@@ -54,7 +54,7 @@ import static org.hamcrest.Matchers.nullValue;
 *
 */
public class ConcurrentPercolatorIT extends ESIntegTestCase {
    public void testSimpleConcurrentPercolator() throws Exception {
    public void testSimpleConcurrentPercolator() throws Throwable {
        // We need to index a document / define mapping, otherwise field1 doesn't get recognized as number field.
        // If we don't do this, then 'test2' percolate query gets parsed as a TermQuery and not a RangeQuery.
        // The percolate api doesn't parse the doc if no queries have registered, so it can't lazily create a mapping

@@ -143,9 +143,8 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase {

        Throwable assertionError = exceptionHolder.get();
        if (assertionError != null) {
            assertionError.printStackTrace();
            throw assertionError;
        }
        assertThat(assertionError + " should be null", assertionError, nullValue());
    }

    public void testConcurrentAddingAndPercolating() throws Exception {
@@ -218,7 +218,7 @@ public abstract class BasePipelineAggregationTestCase<AF extends PipelineAggrega
        AF testAgg = createTestAggregatorFactory();
        AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder().skipResolveOrder().addPipelineAggregator(testAgg);
        String contentString = factoriesBuilder.toString();
        System.out.println(contentString);
        logger.info("Content string: {}", contentString);
        XContentParser parser = XContentFactory.xContent(contentString).createParser(contentString);
        QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
        parseContext.reset(parser);
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilderTests;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

@@ -132,7 +133,7 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregatorBuild
            factory.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values())));
            break;
        case 3:
            factory.sort(SortBuilders.scriptSort(new Script("foo"), "number").order(randomFrom(SortOrder.values())));
            factory.sort(SortBuilders.scriptSort(new Script("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())));
            break;
        case 4:
            factory.sort(randomAsciiOfLengthBetween(5, 20));
@@ -74,6 +74,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilderTests;
import org.elasticsearch.search.rescore.QueryRescoreBuilderTests;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilderTests;

@@ -341,7 +342,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
            builder.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values())));
            break;
        case 3:
            builder.sort(SortBuilders.scriptSort(new Script("foo"), "number").order(randomFrom(SortOrder.values())));
            builder.sort(SortBuilders.scriptSort(new Script("foo"), ScriptSortType.NUMBER).order(randomFrom(SortOrder.values())));
            break;
        case 4:
            builder.sort(randomAsciiOfLengthBetween(5, 20));
@@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.termvectors.TermVectorsService;

@@ -173,7 +174,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
                }
                hitField.values().add(tv);
            } catch (IOException e) {
                e.printStackTrace();
                ESLoggerFactory.getLogger(FetchSubPhasePluginIT.class.getName()).info("Swallowed exception", e);
            }
        }
    }
@@ -19,6 +19,7 @@

package org.elasticsearch.search.geo;

import org.elasticsearch.common.logging.ESLoggerFactory;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.locationtech.spatial4j.exception.InvalidShapeException;

@@ -560,7 +561,7 @@ public class GeoFilterIT extends ESIntegTestCase {
            strategy.makeQuery(args);
            return true;
        } catch (UnsupportedSpatialOperation e) {
            e.printStackTrace();
            ESLoggerFactory.getLogger(GeoFilterIT.class.getName()).info("Unsupported spatial operation {}", e, relation);
            return false;
        }
    }
@@ -400,7 +400,6 @@ public class HighlightBuilderTests extends ESTestCase {
        context.reset(parser);
        highlightBuilder = HighlightBuilder.PROTOTYPE.fromXContent(context);
        assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder);
        System.out.println(Math.log(1/(double)(1+1)) + 1.0);
    }

    /**
@@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortMode;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

@@ -748,7 +749,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                .addSort(
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .sortMode("sum")
                                .sortMode(SortMode.SUM)
                                .order(SortOrder.ASC)
                )
                .execute().actionGet();

@@ -768,7 +769,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                .addSort(
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .sortMode("sum")
                                .sortMode(SortMode.SUM)
                                .order(SortOrder.DESC)
                )
                .execute().actionGet();

@@ -789,7 +790,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
                                .sortMode("sum")
                                .sortMode(SortMode.SUM)
                                .order(SortOrder.ASC)
                )
                .execute().actionGet();

@@ -809,7 +810,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                .addSort(
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .sortMode("avg")
                                .sortMode(SortMode.AVG)
                                .order(SortOrder.ASC)
                )
                .execute().actionGet();

@@ -828,7 +829,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                .addSort(
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .sortMode("avg")
                                .sortMode(SortMode.AVG)
                                .order(SortOrder.DESC)
                )
                .execute().actionGet();

@@ -849,7 +850,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
                        SortBuilders.fieldSort("parent.child.child_values")
                                .setNestedPath("parent.child")
                                .setNestedFilter(QueryBuilders.termQuery("parent.child.filter", true))
                                .sortMode("avg")
                                .sortMode(SortMode.AVG)
                                .order(SortOrder.ASC)
                )
                .execute().actionGet();
@@ -19,7 +19,6 @@

package org.elasticsearch.search.sort;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;

@@ -55,6 +54,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder & SortBuilderPa
        namedWriteableRegistry = new NamedWriteableRegistry();
        namedWriteableRegistry.registerPrototype(SortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE);
        namedWriteableRegistry.registerPrototype(SortBuilder.class, ScoreSortBuilder.PROTOTYPE);
        namedWriteableRegistry.registerPrototype(SortBuilder.class, ScriptSortBuilder.PROTOTYPE);
        namedWriteableRegistry.registerPrototype(SortBuilder.class, FieldSortBuilder.PROTOTYPE);
        indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry();
    }

@@ -155,7 +155,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder & SortBuilderPa
        try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
            T prototype = (T) namedWriteableRegistry.getPrototype(SortBuilder.class,
                    original.getWriteableName());
            T copy = (T) prototype.readFrom(in);
            T copy = prototype.readFrom(in);
            return copy;
        }
    }
@@ -19,44 +19,6 @@

package org.elasticsearch.search.sort;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@@ -80,6 +42,43 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

public class FieldSortIT extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {

@@ -985,7 +984,7 @@ public class FieldSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setSize(10)
                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode("sum"))
                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
@@ -270,7 +270,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        // Order: Asc, Mode: max
        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX))
                .execute().actionGet();

        assertHitCount(searchResponse, 5);

@@ -296,7 +296,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        // Order: Desc, Mode: min
        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN))
                .execute().actionGet();

        assertHitCount(searchResponse, 5);

@@ -308,7 +308,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC))
                .execute().actionGet();

        assertHitCount(searchResponse, 5);

@@ -320,7 +320,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));

        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
                .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.DESC))
                .execute().actionGet();

        assertHitCount(searchResponse, 5);

@@ -333,7 +333,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        try {
            client().prepareSearch("test").setQuery(matchAllQuery())
                    .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("sum"));
                    .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.SUM));
            fail("sum should not be supported for sorting by geo distance");
        } catch (IllegalArgumentException e) {
            // expected

@@ -455,7 +455,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        // Order: Asc, Mode: max
        searchResponse = client()
                .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
                        40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max").setNestedPath("branches"))
                        40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX).setNestedPath("branches"))
                .execute().actionGet();

        assertHitCount(searchResponse, 4);

@@ -480,7 +480,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        // Order: Desc, Mode: min
        searchResponse = client()
                .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
                        40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min").setNestedPath("branches"))
                        40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN).setNestedPath("branches"))
                .execute().actionGet();

        assertHitCount(searchResponse, 4);

@@ -492,7 +492,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        searchResponse = client()
                .prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
                        40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
                        40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches"))
                .execute().actionGet();

        assertHitCount(searchResponse, 4);

@@ -504,7 +504,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch("companies")
                .setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                        .setNestedPath("branches").sortMode("avg").order(SortOrder.DESC).setNestedPath("branches"))
                        .setNestedPath("branches").sortMode(SortMode.AVG).order(SortOrder.DESC).setNestedPath("branches"))
                .execute().actionGet();

        assertHitCount(searchResponse, 4);

@@ -517,7 +517,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                        .setNestedFilter(termQuery("branches.name", "brooklyn"))
                        .sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
                        .sortMode(SortMode.AVG).order(SortOrder.ASC).setNestedPath("branches"))
                .execute().actionGet();
        assertHitCount(searchResponse, 4);
        assertFirstHit(searchResponse, hasId("4"));

@@ -529,7 +529,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

        try {
            client().prepareSearch("companies").setQuery(matchAllQuery())
                    .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode("sum")
                    .addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode(SortMode.SUM)
                            .setNestedPath("branches"));
            fail("Sum should not be allowed as sort mode");
        } catch (IllegalArgumentException e) {

@@ -567,11 +567,11 @@ public class GeoDistanceIT extends ESIntegTestCase {
        assertHitCount(result, 1);
    }

    private double randomLon() {
    private static double randomLon() {
        return randomDouble() * 360 - 180;
    }

    private double randomLat() {
    private static double randomLat() {
        return randomDouble() * 180 - 90;
    }

@@ -619,7 +619,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
        }
    }

    private long assertDuelOptimization(SearchResponse resp) {
    private static long assertDuelOptimization(SearchResponse resp) {
        long matches = -1;
        assertSearchResponse(resp);
        if (matches < 0) {
@@ -95,7 +95,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));

@@ -103,7 +103,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));

@@ -111,7 +111,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));

@@ -119,7 +119,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));

@@ -194,7 +194,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2.5, 1, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));

@@ -202,7 +202,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(geoDistanceSortBuilder.sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(3.25, 4, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));

@@ -223,7 +223,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);

@@ -231,7 +231,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);

@@ -239,7 +239,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {

        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);

@@ -263,9 +263,17 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
                                .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);

        searchResponse = client()
                .prepareSearch()
                .setSource(
                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
                                .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE)
                                .ignoreMalformed(true).coerce(true))).execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
    }

    private void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) {
    private static void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) {
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 2, DistanceUnit.KILOMETERS), 1.e-4));
        assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 1, DistanceUnit.KILOMETERS), 1.e-4));
@@ -27,7 +27,6 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.geo.RandomGeoGenerator;

import java.io.IOException;

@@ -90,16 +89,15 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
        return result;
    }

    private static String mode(String original) {
        String[] modes = {"MIN", "MAX", "AVG"};
        String mode = ESTestCase.randomFrom(modes);
        while (mode.equals(original)) {
            mode = ESTestCase.randomFrom(modes);
        }
        return mode;
    private static SortMode mode(SortMode original) {
        SortMode result;
        do {
            result = randomFrom(SortMode.values());
        } while (result == SortMode.SUM || result == original);
        return result;
    }

    private DistanceUnit unit(DistanceUnit original) {
    private static DistanceUnit unit(DistanceUnit original) {
        int id = -1;
        while (id == -1 || (original != null && original.ordinal() == id)) {
            id = randomIntBetween(0, DistanceUnit.values().length - 1);

@@ -107,7 +105,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
        return DistanceUnit.values()[id];
    }

    private GeoPoint[] points(GeoPoint[] original) {
    private static GeoPoint[] points(GeoPoint[] original) {
        GeoPoint[] result = null;
        while (result == null || Arrays.deepEquals(original, result)) {
            int count = randomIntBetween(1, 10);

@@ -119,7 +117,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
        return result;
    }

    private GeoDistance geoDistance(GeoDistance original) {
    private static GeoDistance geoDistance(GeoDistance original) {
        int id = -1;
        while (id == -1 || (original != null && original.ordinal() == id)) {
            id = randomIntBetween(0, GeoDistance.values().length - 1);

@@ -177,7 +175,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
        GeoPoint point = RandomGeoGenerator.randomPoint(getRandom());
        builder.point(point.getLat(), point.getLon());
        try {
            builder.sortMode("SUM");
            builder.sortMode(SortMode.SUM);
            fail("sort mode sum should not be supported");
        } catch (IllegalArgumentException e) {
            // all good
@@ -60,15 +60,14 @@ public class RandomSortDataGenerator {
        return nestedPath;
    }

    public static String mode(String original) {
        String[] modes = {"min", "max", "avg", "sum"};
        String mode = ESTestCase.randomFrom(modes);
    public static SortMode mode(SortMode original) {
        SortMode mode = ESTestCase.randomFrom(SortMode.values());
        while (mode.equals(original)) {
            mode = ESTestCase.randomFrom(modes);
            mode = ESTestCase.randomFrom(SortMode.values());
        }
        return mode;
    }

    public static Object missing(Object original) {
        Object missing = null;
        Object otherMissing = null;

@@ -95,12 +94,12 @@ public class RandomSortDataGenerator {
            break;
        default:
            throw new IllegalStateException("Unknown missing type.");

        }
    }
        return missing;
    }

    public static SortOrder order(SortOrder original) {
        SortOrder order = SortOrder.ASC;
        if (order.equals(original)) {
@@ -0,0 +1,241 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.sort;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.junit.Rule;
import org.junit.rules.ExpectedException;

import java.io.IOException;

public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuilder> {

    @Override
    protected ScriptSortBuilder createTestItem() {
        ScriptSortBuilder builder = new ScriptSortBuilder(new Script(randomAsciiOfLengthBetween(5, 10)),
                randomBoolean() ? ScriptSortType.NUMBER : ScriptSortType.STRING);
        if (randomBoolean()) {
            builder.order(RandomSortDataGenerator.order(builder.order()));
        }
        if (randomBoolean()) {
            builder.sortMode(RandomSortDataGenerator.mode(builder.sortMode()));
        }
        if (randomBoolean()) {
            builder.setNestedFilter(RandomSortDataGenerator.nestedFilter(builder.getNestedFilter()));
        }
        if (randomBoolean()) {
            builder.setNestedPath(RandomSortDataGenerator.randomAscii(builder.getNestedPath()));
        }
        return builder;
    }

    @Override
    protected ScriptSortBuilder mutate(ScriptSortBuilder original) throws IOException {
        ScriptSortBuilder result;
        if (randomBoolean()) {
            // change one of the constructor args, copy the rest over
            Script script = original.script();
            ScriptSortType type = original.type();
            if (randomBoolean()) {
                result = new ScriptSortBuilder(new Script(script.getScript() + "_suffix"), type);
            } else {
                result = new ScriptSortBuilder(script, type.equals(ScriptSortType.NUMBER) ? ScriptSortType.STRING : ScriptSortType.NUMBER);
            }
            result.order(original.order());
            if (original.sortMode() != null) {
                result.sortMode(original.sortMode());
            }
            result.setNestedFilter(original.getNestedFilter());
            result.setNestedPath(original.getNestedPath());
            return result;
        }
        result = new ScriptSortBuilder(original);
        switch (randomIntBetween(0, 3)) {
        case 0:
            if (original.order() == SortOrder.ASC) {
                result.order(SortOrder.DESC);
            } else {
                result.order(SortOrder.ASC);
            }
            break;
        case 1:
            result.sortMode(RandomSortDataGenerator.mode(original.sortMode()));
            break;
        case 2:
            result.setNestedFilter(RandomSortDataGenerator.nestedFilter(original.getNestedFilter()));
            break;
        case 3:
            result.setNestedPath(original.getNestedPath() + "_some_suffix");
            break;
        }
        return result;
    }

    @Rule
    public ExpectedException exceptionRule = ExpectedException.none();

    public void testScriptSortType() {
        // we rely on these ordinals in serialization, so changing them breaks bwc.
        assertEquals(0, ScriptSortType.STRING.ordinal());
        assertEquals(1, ScriptSortType.NUMBER.ordinal());

        assertEquals("string", ScriptSortType.STRING.toString());
        assertEquals("number", ScriptSortType.NUMBER.toString());

        assertEquals(ScriptSortType.STRING, ScriptSortType.fromString("string"));
        assertEquals(ScriptSortType.STRING, ScriptSortType.fromString("String"));
        assertEquals(ScriptSortType.STRING, ScriptSortType.fromString("STRING"));
        assertEquals(ScriptSortType.NUMBER, ScriptSortType.fromString("number"));
        assertEquals(ScriptSortType.NUMBER, ScriptSortType.fromString("Number"));
        assertEquals(ScriptSortType.NUMBER, ScriptSortType.fromString("NUMBER"));
    }

    public void testScriptSortTypeNull() {
        exceptionRule.expect(NullPointerException.class);
        exceptionRule.expectMessage("input string is null");
        ScriptSortType.fromString(null);
    }

    public void testScriptSortTypeIllegalArgument() {
        exceptionRule.expect(IllegalArgumentException.class);
        exceptionRule.expectMessage("Unknown ScriptSortType [xyz]");
        ScriptSortType.fromString("xyz");
    }

    public void testParseJson() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String scriptSort = "{\n" +
                "\"_script\" : {\n" +
                "\"type\" : \"number\",\n" +
                "\"script\" : {\n" +
                "\"inline\": \"doc['field_name'].value * factor\",\n" +
                "\"params\" : {\n" +
                "\"factor\" : 1.1\n" +
                "}\n" +
                "},\n" +
                "\"mode\" : \"max\",\n" +
                "\"order\" : \"asc\"\n" +
                "} }\n";
        XContentParser parser = XContentFactory.xContent(scriptSort).createParser(scriptSort);
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        context.reset(parser);
        ScriptSortBuilder builder = ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
        assertEquals("doc['field_name'].value * factor", builder.script().getScript());
        assertNull(builder.script().getLang());
        assertEquals(1.1, builder.script().getParams().get("factor"));
        assertEquals(ScriptType.INLINE, builder.script().getType());
        assertEquals(ScriptSortType.NUMBER, builder.type());
        assertEquals(SortOrder.ASC, builder.order());
        assertEquals(SortMode.MAX, builder.sortMode());
        assertNull(builder.getNestedFilter());
        assertNull(builder.getNestedPath());
    }

    public void testParseJsonOldStyle() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String scriptSort = "{\n" +
                "\"_script\" : {\n" +
                "\"type\" : \"number\",\n" +
                "\"script\" : \"doc['field_name'].value * factor\",\n" +
                "\"params\" : {\n" +
                "\"factor\" : 1.1\n" +
                "},\n" +
                "\"mode\" : \"max\",\n" +
                "\"order\" : \"asc\"\n" +
                "} }\n";
        XContentParser parser = XContentFactory.xContent(scriptSort).createParser(scriptSort);
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        context.reset(parser);
        ScriptSortBuilder builder = ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
        assertEquals("doc['field_name'].value * factor", builder.script().getScript());
        assertNull(builder.script().getLang());
        assertEquals(1.1, builder.script().getParams().get("factor"));
        assertEquals(ScriptType.INLINE, builder.script().getType());
        assertEquals(ScriptSortType.NUMBER, builder.type());
        assertEquals(SortOrder.ASC, builder.order());
        assertEquals(SortMode.MAX, builder.sortMode());
        assertNull(builder.getNestedFilter());
        assertNull(builder.getNestedPath());
    }

    public void testParseBadFieldNameExceptions() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String scriptSort = "{\"_script\" : {" + "\"bad_field\" : \"number\"" + "} }";
        XContentParser parser = XContentFactory.xContent(scriptSort).createParser(scriptSort);
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        context.reset(parser);
        exceptionRule.expect(ParsingException.class);
        exceptionRule.expectMessage("failed to parse field [bad_field]");
        ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
    }

    public void testParseBadFieldNameExceptionsOnStartObject() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));

        String scriptSort = "{\"_script\" : {" + "\"bad_field\" : { \"order\" : \"asc\" } } }";
        XContentParser parser = XContentFactory.xContent(scriptSort).createParser(scriptSort);
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        context.reset(parser);
        exceptionRule.expect(ParsingException.class);
        exceptionRule.expectMessage("failed to parse field [bad_field]");
        ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
    }

    public void testParseUnexpectedToken() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));

        String scriptSort = "{\"_script\" : {" + "\"script\" : [ \"order\" : \"asc\" ] } }";
        XContentParser parser = XContentFactory.xContent(scriptSort).createParser(scriptSort);
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        context.reset(parser);
        exceptionRule.expect(ParsingException.class);
        exceptionRule.expectMessage("unexpected token [START_ARRAY]");
        ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
    }
}
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Rule;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
public class SortModeTests extends ESTestCase {

    @Rule
    public ExpectedException exceptionRule = ExpectedException.none();

    public void testSortMode() {
        // we rely on these ordinals in serialization, so changing them breaks bwc.
        assertEquals(0, SortMode.MIN.ordinal());
        assertEquals(1, SortMode.MAX.ordinal());
        assertEquals(2, SortMode.SUM.ordinal());
        assertEquals(3, SortMode.AVG.ordinal());
        assertEquals(4, SortMode.MEDIAN.ordinal());

        assertEquals("min", SortMode.MIN.toString());
        assertEquals("max", SortMode.MAX.toString());
        assertEquals("sum", SortMode.SUM.toString());
        assertEquals("avg", SortMode.AVG.toString());
        assertEquals("median", SortMode.MEDIAN.toString());

        for (SortMode mode : SortMode.values()) {
            assertEquals(mode, SortMode.fromString(mode.toString()));
            assertEquals(mode, SortMode.fromString(mode.toString().toUpperCase(Locale.ROOT)));
        }
    }

    public void testParseNull() {
        exceptionRule.expect(NullPointerException.class);
        exceptionRule.expectMessage("input string is null");
        SortMode.fromString(null);
    }

    public void testIllegalArgument() {
        exceptionRule.expect(IllegalArgumentException.class);
        exceptionRule.expectMessage("Unknown SortMode [xyz]");
        SortMode.fromString("xyz");
    }

}
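The ordinal assertions above exist because, per the test's own comment, this branch serializes SortMode by ordinal, so reordering the constants would break backwards compatibility. A sketch of what ordinal-based stream serialization typically looks like (hypothetical method names, not the exact implementation) is:

    // Hypothetical sketch: write/read the enum by its ordinal, which is why the ordinals above must stay stable.
    public void writeTo(final StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    public static SortMode readFromStream(final StreamInput in) throws IOException {
        final int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown SortMode ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }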
|
|
@ -137,8 +137,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
try {
|
||||
channel.sendResponse(new StringMessageResponse("hello " + request.message));
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -162,8 +162,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -193,8 +193,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -218,7 +218,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
threadPool.getThreadContext().putHeader("test.pong.user", "pong_user");
|
||||
channel.sendResponse(response);
|
||||
} catch (IOException e) {
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
});
|
||||
final Object context = new Object();
|
||||
|
@ -245,7 +246,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
};
|
||||
StringMessageRequest ping = new StringMessageRequest("ping");
|
||||
|
@ -317,8 +319,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
try {
|
||||
channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.builder().withCompress(true).build());
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -341,8 +343,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -364,8 +366,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
try {
|
||||
channel.sendResponse(new StringMessageResponse("hello " + request.message), TransportResponseOptions.builder().withCompress(true).build());
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -389,8 +391,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -552,8 +554,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
try {
|
||||
channel.sendResponse(new StringMessageResponse("hello " + request.message));
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -613,7 +615,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response for " + counter + ": " + exp.getDetailedMessage());
|
||||
}
|
||||
});
|
||||
|
@ -959,8 +961,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
fail();
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1000,8 +1002,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
fail();
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1044,8 +1046,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
fail();
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1084,8 +1086,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
fail();
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -86,8 +86,8 @@ public class NettyScheduledPingTests extends ESTestCase {
|
|||
try {
|
||||
channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY);
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
assertThat(e.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", e);
|
||||
fail(e.getMessage());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -113,8 +113,8 @@ public class NettyScheduledPingTests extends ESTestCase {
|
|||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
exp.printStackTrace();
|
||||
assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
|
||||
logger.error("Unexpected failure", exp);
|
||||
fail("got exception instead of a response: " + exp.getMessage());
|
||||
}
|
||||
}).txGet();
|
||||
}
|
||||
|
|
|
@ -54,7 +54,7 @@ public class ConcurrentDocumentOperationIT extends ESIntegTestCase {
|
|||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
e.printStackTrace();
|
||||
logger.error("Unexpected exception while indexing", e);
|
||||
failure.set(e);
|
||||
latch.countDown();
|
||||
}
|
||||
|
|
|
@ -354,9 +354,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
switch (random.nextInt(6)) {
|
||||
case 0:
|
||||
// random simple
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use random simple ids");
|
||||
}
|
||||
logger.info("--> use random simple ids");
|
||||
ids = new IDSource() {
|
||||
@Override
|
||||
public String next() {
|
||||
|
@ -366,9 +364,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
break;
|
||||
case 1:
|
||||
// random realistic unicode
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use random realistic unicode ids");
|
||||
}
|
||||
logger.info("--> use random realistic unicode ids");
|
||||
ids = new IDSource() {
|
||||
@Override
|
||||
public String next() {
|
||||
|
@ -378,9 +374,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
break;
|
||||
case 2:
|
||||
// sequential
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use seuquential ids");
|
||||
}
|
||||
logger.info("--> use sequential ids");
|
||||
ids = new IDSource() {
|
||||
int upto;
|
||||
|
||||
|
@ -392,9 +386,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
break;
|
||||
case 3:
|
||||
// zero-pad sequential
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use zero-pad seuquential ids");
|
||||
}
|
||||
logger.info("--> use zero-padded sequential ids");
|
||||
ids = new IDSource() {
|
||||
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
|
||||
final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0);
|
||||
|
@ -409,9 +401,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
break;
|
||||
case 4:
|
||||
// random long
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use random long ids");
|
||||
}
|
||||
logger.info("--> use random long ids");
|
||||
ids = new IDSource() {
|
||||
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
|
||||
int upto;
|
||||
|
@ -424,9 +414,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
break;
|
||||
case 5:
|
||||
// zero-pad random long
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use zero-pad random long ids");
|
||||
}
|
||||
logger.info("--> use zero-padded random long ids");
|
||||
ids = new IDSource() {
|
||||
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
|
||||
final String zeroPad = String.format(Locale.ROOT, "%015d", 0);
|
||||
|
@ -539,9 +527,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
idPrefix = "";
|
||||
} else {
|
||||
idPrefix = TestUtil.randomSimpleString(random);
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use id prefix: " + idPrefix);
|
||||
}
|
||||
logger.debug("--> use id prefix {}", idPrefix);
|
||||
}
|
||||
|
||||
int numIDs;
|
||||
|
@ -564,9 +550,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
final IDAndVersion[] idVersions = new IDAndVersion[TestUtil.nextInt(random, numIDs / 2, numIDs * (TEST_NIGHTLY ? 8 : 2))];
|
||||
final Map<String, IDAndVersion> truth = new HashMap<>();
|
||||
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: use " + numIDs + " ids; " + idVersions.length + " operations");
|
||||
}
|
||||
logger.debug("--> use {} ids; {} operations", numIDs, idVersions.length);
|
||||
|
||||
for (int i = 0; i < idVersions.length; i++) {
|
||||
|
||||
|
@ -596,10 +580,9 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
idVersions[i] = x;
|
||||
}
|
||||
|
||||
if (VERBOSE) {
|
||||
for (IDAndVersion idVersion : idVersions) {
|
||||
System.out.println("id=" + idVersion.id + " version=" + idVersion.version + " delete?=" + idVersion.delete + " truth?=" + (truth.get(idVersion.id) == idVersion));
|
||||
}
|
||||
for (IDAndVersion idVersion : idVersions) {
|
||||
logger.debug("--> id={} version={} delete?={} truth?={}", idVersion.id, idVersion.version, idVersion.delete,
|
||||
truth.get(idVersion.id) == idVersion);
|
||||
}
|
||||
|
||||
final AtomicInteger upto = new AtomicInteger();
|
||||
|
@ -623,8 +606,8 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
if (index >= idVersions.length) {
|
||||
break;
|
||||
}
|
||||
if (VERBOSE && index % 100 == 0) {
|
||||
System.out.println(Thread.currentThread().getName() + ": index=" + index);
|
||||
if (index % 100 == 0) {
|
||||
logger.trace("{}: index={}", Thread.currentThread().getName(), index);
|
||||
}
|
||||
IDAndVersion idVersion = idVersions[index];
|
||||
|
||||
|
@ -657,18 +640,18 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
idVersion.indexFinishTime = System.nanoTime() - startTime;
|
||||
|
||||
if (threadRandom.nextInt(100) == 7) {
|
||||
System.out.println(threadID + ": TEST: now refresh at " + (System.nanoTime() - startTime));
|
||||
logger.trace("--> {}: TEST: now refresh at {}", threadID, System.nanoTime() - startTime);
|
||||
refresh();
|
||||
System.out.println(threadID + ": TEST: refresh done at " + (System.nanoTime() - startTime));
|
||||
logger.trace("--> {}: TEST: refresh done at {}", threadID, System.nanoTime() - startTime);
|
||||
}
|
||||
if (threadRandom.nextInt(100) == 7) {
|
||||
System.out.println(threadID + ": TEST: now flush at " + (System.nanoTime() - startTime));
|
||||
logger.trace("--> {}: TEST: now flush at {}", threadID, System.nanoTime() - startTime);
|
||||
try {
|
||||
flush();
|
||||
} catch (FlushNotAllowedEngineException fnaee) {
|
||||
// OK
|
||||
}
|
||||
System.out.println(threadID + ": TEST: flush done at " + (System.nanoTime() - startTime));
|
||||
logger.trace("--> {}: TEST: flush done at {}", threadID, System.nanoTime() - startTime);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
@ -696,16 +679,17 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
|||
}
|
||||
long actualVersion = client().prepareGet("test", "type", id).execute().actionGet().getVersion();
|
||||
if (actualVersion != expected) {
|
||||
System.out.println("FAILED: idVersion=" + idVersion + " actualVersion=" + actualVersion);
|
||||
logger.error("--> FAILED: idVersion={} actualVersion= {}", idVersion, actualVersion);
|
||||
failed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (failed) {
|
||||
System.out.println("All versions:");
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (int i = 0; i < idVersions.length; i++) {
|
||||
System.out.println("i=" + i + " " + idVersions[i]);
|
||||
sb.append("i=").append(i).append(" ").append(idVersions[i]).append(System.lineSeparator());
|
||||
}
|
||||
logger.error("All versions: {}", sb);
|
||||
fail("wrong versions for some IDs");
|
||||
}
|
||||
}
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@ -1,4 +1,4 @@
# you can override this using by setting a system property, for example -Des.logger.level=DEBUG
# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG
es.logger.level: INFO
rootLogger: ${es.logger.level}, console
logger:
|
|
@ -99,7 +99,7 @@ fi
|
|||
# Define other required variables
|
||||
PID_FILE="$PID_DIR/$NAME.pid"
|
||||
DAEMON=$ES_HOME/bin/elasticsearch
|
||||
DAEMON_OPTS="-d -p $PID_FILE -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D es.default.path.conf=$CONF_DIR"
|
||||
DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR"
|
||||
|
||||
export ES_HEAP_SIZE
|
||||
export ES_HEAP_NEWSIZE
|
||||
|
|
|
@ -117,7 +117,7 @@ start() {
|
|||
cd $ES_HOME
|
||||
echo -n $"Starting $prog: "
|
||||
# if not running, start it up here, usually something like "daemon $exec"
|
||||
daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -D es.default.path.home=$ES_HOME -D es.default.path.logs=$LOG_DIR -D es.default.path.data=$DATA_DIR -D es.default.path.conf=$CONF_DIR
|
||||
daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR
|
||||
retval=$?
|
||||
echo
|
||||
[ $retval -eq 0 ] && touch $lockfile
|
||||
|
|
|
@ -20,11 +20,10 @@ Group=elasticsearch
|
|||
ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec
|
||||
|
||||
ExecStart=/usr/share/elasticsearch/bin/elasticsearch \
|
||||
-Des.pidfile=${PID_DIR}/elasticsearch.pid \
|
||||
-Des.default.path.home=${ES_HOME} \
|
||||
-Des.default.path.logs=${LOG_DIR} \
|
||||
-Des.default.path.data=${DATA_DIR} \
|
||||
-Des.default.path.conf=${CONF_DIR}
|
||||
-p ${PID_DIR}/elasticsearch.pid \
|
||||
-Ees.default.path.logs=${LOG_DIR} \
|
||||
-Ees.default.path.data=${DATA_DIR} \
|
||||
-Ees.default.path.conf=${CONF_DIR}
|
||||
|
||||
StandardOutput=journal
|
||||
StandardError=inherit
|
||||
|
|
|
@ -127,10 +127,10 @@ export HOSTNAME
|
|||
daemonized=`echo $* | egrep -- '(^-d |-d$| -d |--daemonize$|--daemonize )'`
|
||||
if [ -z "$daemonized" ] ; then
|
||||
exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \
|
||||
org.elasticsearch.bootstrap.Elasticsearch start "$@"
|
||||
org.elasticsearch.bootstrap.Elasticsearch "$@"
|
||||
else
|
||||
exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Des.path.home="$ES_HOME" -cp "$ES_CLASSPATH" \
|
||||
org.elasticsearch.bootstrap.Elasticsearch start "$@" <&- &
|
||||
org.elasticsearch.bootstrap.Elasticsearch "$@" <&- &
|
||||
retval=$?
|
||||
pid=$!
|
||||
[ $retval -eq 0 ] || exit $retval
|
||||
|
|
|
@ -104,4 +104,4 @@ ECHO additional elements via the plugin mechanism, or if code must really be 1>&
|
|||
ECHO added to the main classpath, add jars to lib\, unsupported 1>&2
|
||||
EXIT /B 1
|
||||
)
|
||||
set ES_PARAMS=-Delasticsearch -Des-foreground=yes -Des.path.home="%ES_HOME%"
|
||||
set ES_PARAMS=-Delasticsearch -Des.path.home="%ES_HOME%"
|
||||
|
|
|
@ -152,7 +152,7 @@ if "%DATA_DIR%" == "" set DATA_DIR=%ES_HOME%\data
|
|||
|
||||
if "%CONF_DIR%" == "" set CONF_DIR=%ES_HOME%\config
|
||||
|
||||
set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.default.path.home="%ES_HOME%";-Des.default.path.logs="%LOG_DIR%";-Des.default.path.data="%DATA_DIR%";-Des.default.path.conf="%CONF_DIR%"
|
||||
set ES_PARAMS=-Delasticsearch;-Des.path.home="%ES_HOME%";-Des.default.path.logs="%LOG_DIR%";-Des.default.path.data="%DATA_DIR%";-Des.default.path.conf="%CONF_DIR%"
|
||||
|
||||
set JVM_OPTS=%JAVA_OPTS: =;%
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# you can override this using by setting a system property, for example -Des.logger.level=DEBUG
|
||||
# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG
|
||||
es.logger.level: INFO
|
||||
rootLogger: ${es.logger.level}, console, file
|
||||
logger:
|
||||
|
|
|
@ -167,7 +167,7 @@ can do this as follows:
|
|||
|
||||
[source,sh]
|
||||
---------------------
|
||||
sudo bin/elasticsearch-plugin -Des.path.conf=/path/to/custom/config/dir install <plugin name>
|
||||
sudo bin/elasticsearch-plugin -Ees.path.conf=/path/to/custom/config/dir install <plugin name>
|
||||
---------------------
|
||||
|
||||
You can also set the `CONF_DIR` environment variable to the custom config
|
||||
|
|
|
@ -163,7 +163,7 @@ As mentioned previously, we can override either the cluster or node name. This c
|
|||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
./elasticsearch --cluster.name my_cluster_name --node.name my_node_name
|
||||
./elasticsearch -Ees.cluster.name=my_cluster_name -Ees.node.name=my_node_name
|
||||
--------------------------------------------------
|
||||
|
||||
Also note the line marked http with information about the HTTP address (`192.168.8.112`) and port (`9200`) that our node is reachable from. By default, Elasticsearch uses port `9200` to provide access to its REST API. This port is configurable if necessary.
|
||||
|
|
|
@ -14,7 +14,7 @@ attribute as follows:
|
|||
|
||||
[source,sh]
|
||||
------------------------
|
||||
bin/elasticsearch --node.rack rack1 --node.size big <1>
|
||||
bin/elasticsearch -Ees.node.rack=rack1 -Ees.node.size=big <1>
|
||||
------------------------
|
||||
<1> These attribute settings can also be specified in the `elasticsearch.yml` config file.
|
||||
|
||||
|
|
|
@ -15,10 +15,10 @@ on all nodes. To disable ingest on a node, configure the following setting in th
|
|||
node.ingest: false
|
||||
--------------------------------------------------
|
||||
|
||||
To pre-process documents before indexing, you <<pipe-line,define a pipeline>> that specifies
|
||||
To pre-process documents before indexing, you <<pipeline,define a pipeline>> that specifies
|
||||
a series of <<ingest-processors,processors>>. Each processor transforms the document in some way.
|
||||
For example, you may have a pipeline that consists of one processor that removes a field from
|
||||
the document followed by another processor that renames a field.
|
||||
the document followed by another processor that renames a field.
|
||||
|
||||
To use a pipeline, you simply specify the `pipeline` parameter on an index or bulk request to
|
||||
tell the ingest node which pipeline to use. For example:
|
||||
|
@ -32,7 +32,7 @@ PUT /my-index/my-type/my-id?pipeline=my_pipeline_id
|
|||
--------------------------------------------------
|
||||
// AUTOSENSE
|
||||
|
||||
See <<ingest-apis,Ingest APIs>> for more information about creating, adding, and deleting pipelines.
|
||||
See <<ingest-apis,Ingest APIs>> for more information about creating, adding, and deleting pipelines.
|
||||
|
||||
--
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
[[pipe-line]]
[[pipeline]]
== Pipeline Definition

A pipeline is a definition of a series of <<ingest-processors, processors>> that are to be executed
|
@ -211,12 +211,17 @@ For simplicity, only one way of adding the ids to the existing list (empty by de

The inner DirectCandidateGenerator class has been moved out to its own class called DirectCandidateGeneratorBuilder.

==== SuggestBuilder
===== SortBuilders

The `sortMode` setter in `FieldSortBuilder`, `GeoDistanceSortBuilder` and `ScriptSortBuilder` now
accept a `SortMode` enum instead of a String constant. Also the getter returns the same enum type.
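For example, a caller that previously passed a string constant would now pass the enum. The field name "price" and the pre-built `someScript` instance in this sketch are illustrative assumptions:

// Before (string constant), roughly:  SortBuilders.fieldSort("price").sortMode("min");
// After this change (SortMode enum):
FieldSortBuilder fieldSort = SortBuilders.fieldSort("price");
fieldSort.sortMode(SortMode.MIN);
ScriptSortBuilder scriptSort = SortBuilders.scriptSort(someScript, ScriptSortType.NUMBER);
scriptSort.sortMode(SortMode.MAX);
assert fieldSort.sortMode() == SortMode.MIN;   // the getter now returns the enum as well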
===== SuggestBuilder

The `setText` method has been changed to `setGlobalText` to make the intent more clear, and a `getGlobalText` method has been added.

The `addSuggestion` method now required the user specified suggestion name, previously used in the ctor of each suggestion.

=== SuggestionBuilder
===== SuggestionBuilder

The `field` setter has been deleted. Instead the field name needs to be specified as constructor argument.
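A short sketch of the renamed and moved calls described above; the suggestion name, the field name, and the use of SuggestBuilders.termSuggestion here are illustrative assumptions rather than the documented API:

SuggestBuilder suggest = new SuggestBuilder();
suggest.setGlobalText("some global text");                // was setText(...)
suggest.addSuggestion("my-suggestion",                    // the suggestion name is now passed here ...
        SuggestBuilders.termSuggestion("body"));          // ... and the field name via the constructor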
@ -172,3 +172,18 @@ Two cache concurrency level settings
`indices.fielddata.cache.concurrency_level` because they no longer apply to
the cache implementation used for the request cache and the field data cache.

==== Using system properties to configure Elasticsearch

Elasticsearch can be configured by setting system properties on the
command line via `-Des.name.of.property=value.of.property`. This will be
removed in a future version of Elasticsearch. Instead, use
`-E es.name.of.setting=value.of.setting`. Note that in all cases the
name of the setting must be prefixed with `es.`.

==== Removed using double-dashes to configure Elasticsearch

Elasticsearch could previously be configured on the command line by
setting settings via `--name.of.setting value.of.setting`. This feature
has been removed. Instead, use
`-Ees.name.of.setting=value.of.setting`. Note that in all cases the
name of the setting must be prefixed with `es.`.
|
|
@ -21,7 +21,7 @@ attribute called `rack_id` -- we could use any attribute name. For example:
|
|||
|
||||
[source,sh]
|
||||
----------------------
|
||||
./bin/elasticsearch --node.rack_id rack_one <1>
|
||||
./bin/elasticsearch -Ees.node.rack_id=rack_one <1>
|
||||
----------------------
|
||||
<1> This setting could also be specified in the `elasticsearch.yml` config file.
|
||||
|
||||
|
|
|
@ -40,11 +40,20 @@ A tribe node, configured via the `tribe.*` settings, is a special type of
|
|||
client node that can connect to multiple clusters and perform search and other
|
||||
operations across all connected clusters.
|
||||
|
||||
<<ingest,Ingest node>>::
|
||||
|
||||
A node that has `node.ingest` set to `true` (default). Ingest nodes are able
|
||||
to apply an <<pipeline,ingest pipeline>> to a document in order to transform
|
||||
and enrich the document before indexing. With a heavy ingest load, it makes
|
||||
sense to use dedicated ingest nodes and to mark the master and data nodes as
|
||||
`node.ingest: false`.
|
||||
|
||||
By default a node is both a master-eligible node and a data node. This is very
|
||||
convenient for small clusters but, as the cluster grows, it becomes important
|
||||
to consider separating dedicated master-eligible nodes from dedicated data
|
||||
nodes.
|
||||
|
||||
|
||||
[NOTE]
|
||||
[[coordinating-node]]
|
||||
.Coordinating node
|
||||
|
@ -233,7 +242,7 @@ Like all node settings, it can also be specified on the command line as:
|
|||
|
||||
[source,sh]
|
||||
-----------------------
|
||||
./bin/elasticsearch --path.data /var/elasticsearch/data
|
||||
./bin/elasticsearch -Ees.path.data=/var/elasticsearch/data
|
||||
-----------------------
|
||||
|
||||
TIP: When using the `.zip` or `.tar.gz` distributions, the `path.data` setting
|
||||
|
|
|
@ -67,13 +67,12 @@ There are added features when using the `elasticsearch` shell script.
|
|||
The first, which was explained earlier, is the ability to easily run the
|
||||
process either in the foreground or the background.
|
||||
|
||||
Another feature is the ability to pass `-D` or getopt long style
|
||||
configuration parameters directly to the script. When set, all override
|
||||
anything set using either `JAVA_OPTS` or `ES_JAVA_OPTS`. For example:
|
||||
Another feature is the ability to pass `-E` configuration parameters
|
||||
directly to the script. For example:
|
||||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
$ bin/elasticsearch -Des.index.refresh_interval=5s --node.name=my-node
|
||||
$ bin/elasticsearch -Ees.index.refresh_interval=5s -Ees.node.name=my-node
|
||||
--------------------------------------------------
|
||||
*************************************************************************
|
||||
|
||||
|
|
|
@ -259,7 +259,7 @@ command, for example:
|
|||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
$ elasticsearch -Des.network.host=10.0.0.4
|
||||
$ elasticsearch -Ees.network.host=10.0.0.4
|
||||
--------------------------------------------------
|
||||
|
||||
Another option is to set `es.default.` prefix instead of `es.` prefix,
|
||||
|
@ -336,7 +336,7 @@ course, the above can also be set as a "collapsed" setting, for example:
|
|||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
$ elasticsearch -Des.index.refresh_interval=5s
|
||||
$ elasticsearch -Ees.index.refresh_interval=5s
|
||||
--------------------------------------------------
|
||||
|
||||
All of the index level configuration can be found within each
|
||||
|
|
|
@ -80,7 +80,7 @@ To upgrade using a zip or compressed tarball:
|
|||
overwrite the `config` or `data` directories.
|
||||
|
||||
* Either copy the files in the `config` directory from your old installation
|
||||
to your new installation, or use the `--path.conf` option on the command
|
||||
to your new installation, or use the `-E path.conf=` option on the command
|
||||
line to point to an external config directory.
|
||||
|
||||
* Either copy the files in the `data` directory from your old installation
|
||||
|
|
|
@ -28,8 +28,8 @@ dependencies {
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.script.inline', 'true'
|
||||
systemProperty 'es.script.indexed', 'true'
|
||||
setting 'script.inline', 'true'
|
||||
setting 'script.indexed', 'true'
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -316,10 +316,9 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
System.out.println();
|
||||
System.out.println("max repeat: " + maxRepeat);
|
||||
System.out.println("avg repeat: " + sumRepeat / (double) filled);
|
||||
System.out.println("distribution: " + filled / (double) count);
|
||||
logger.info("max repeat: {}", maxRepeat);
|
||||
logger.info("avg repeat: {}", sumRepeat / (double) filled);
|
||||
logger.info("distribution: {}", filled / (double) count);
|
||||
|
||||
int percentile50 = filled / 2;
|
||||
int percentile25 = (filled / 4);
|
||||
|
@ -333,18 +332,18 @@ public class RandomScoreFunctionTests extends ESIntegTestCase {
|
|||
}
|
||||
sum += i * matrix[i];
|
||||
if (percentile50 == 0) {
|
||||
System.out.println("median: " + i);
|
||||
logger.info("median: {}", i);
|
||||
} else if (percentile25 == 0) {
|
||||
System.out.println("percentile_25: " + i);
|
||||
logger.info("percentile_25: {}", i);
|
||||
} else if (percentile75 == 0) {
|
||||
System.out.println("percentile_75: " + i);
|
||||
logger.info("percentile_75: {}", i);
|
||||
}
|
||||
percentile50--;
|
||||
percentile25--;
|
||||
percentile75--;
|
||||
}
|
||||
|
||||
System.out.println("mean: " + sum / (double) count);
|
||||
logger.info("mean: {}", sum / (double) count);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.plugins.Plugin;
|
|||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.search.sort.ScriptSortBuilder;
|
||||
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
@ -109,7 +110,7 @@ public class SimpleSortTests extends ESIntegTestCase {
|
|||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setSize(size)
|
||||
.addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), "string")).execute().actionGet();
|
||||
.addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), ScriptSortType.STRING)).execute().actionGet();
|
||||
assertHitCount(searchResponse, 10);
|
||||
assertThat(searchResponse.getHits().hits().length, equalTo(size));
|
||||
for (int i = 0; i < size; i++) {
|
||||
|
@ -217,7 +218,7 @@ public class SimpleSortTests extends ESIntegTestCase {
|
|||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo((double) i, TOLERANCE));
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, TOLERANCE));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -326,7 +327,7 @@ public class SimpleSortTests extends ESIntegTestCase {
|
|||
}
|
||||
refresh();
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery())
|
||||
.addSort(SortBuilders.scriptSort(new Script("\u0027\u0027"), "string")).setSize(10).execute().actionGet();
|
||||
.addSort(SortBuilders.scriptSort(new Script("\u0027\u0027"), ScriptSortType.STRING)).setSize(10).execute().actionGet();
|
||||
assertNoFailures(searchResponse);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.plugins.Plugin;
|
|||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
|
@ -68,7 +69,8 @@ public class GroovyScriptTests extends ESIntegTestCase {
|
|||
public void assertScript(String scriptString) {
|
||||
Script script = new Script(scriptString, ScriptType.INLINE, "groovy", null);
|
||||
SearchResponse resp = client().prepareSearch("test")
|
||||
.setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).sort(SortBuilders.scriptSort(script, "number")))
|
||||
.setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).sort(SortBuilders.
|
||||
scriptSort(script, ScriptSortType.NUMBER)))
|
||||
.get();
|
||||
assertNoFailures(resp);
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ dependencies {
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.script.inline', 'true'
|
||||
systemProperty 'es.script.indexed', 'true'
|
||||
setting 'script.inline', 'true'
|
||||
setting 'script.indexed', 'true'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -276,7 +276,7 @@ public class DeleteByQueryTests extends ESIntegTestCase {
|
|||
assertSearchContextsClosed();
|
||||
}
|
||||
|
||||
public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Exception {
|
||||
public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable {
|
||||
createIndex("test");
|
||||
ensureGreen();
|
||||
|
||||
|
@ -324,18 +324,17 @@ public class DeleteByQueryTests extends ESIntegTestCase {
|
|||
|
||||
Throwable assertionError = exceptionHolder.get();
|
||||
if (assertionError != null) {
|
||||
assertionError.printStackTrace();
|
||||
throw assertionError;
|
||||
}
|
||||
assertThat(assertionError + " should be null", assertionError, nullValue());
|
||||
refresh();
|
||||
|
||||
refresh();
|
||||
for (int i = 0; i < threads.length; i++) {
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", i)).get(), 0);
|
||||
}
|
||||
assertSearchContextsClosed();
|
||||
}
|
||||
|
||||
public void testConcurrentDeleteByQueriesOnSameDocs() throws Exception {
|
||||
public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable {
|
||||
assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)));
|
||||
ensureGreen();
|
||||
|
||||
|
@ -386,9 +385,8 @@ public class DeleteByQueryTests extends ESIntegTestCase {
|
|||
|
||||
Throwable assertionError = exceptionHolder.get();
|
||||
if (assertionError != null) {
|
||||
assertionError.printStackTrace();
|
||||
throw assertionError;
|
||||
}
|
||||
assertThat(assertionError + " should be null", assertionError, nullValue());
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).get(), 0L);
|
||||
assertThat(deleted.get(), equalTo(docs));
|
||||
assertSearchContextsClosed();
|
||||
|
@ -445,4 +443,4 @@ public class DeleteByQueryTests extends ESIntegTestCase {
|
|||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ dependencies {
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.script.inline', 'true'
|
||||
systemProperty 'es.script.indexed', 'true'
|
||||
setting 'script.inline', 'true'
|
||||
setting 'script.indexed', 'true'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,75 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script.javascript;
|
||||
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.script.CompiledScript;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SimpleBench {
|
||||
|
||||
public static void main(String[] args) {
|
||||
JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS);
|
||||
Object compiled = se.compile("x + y", Collections.emptyMap());
|
||||
CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled);
|
||||
|
||||
Map<String, Object> vars = new HashMap<String, Object>();
|
||||
// warm up
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
vars.put("x", i);
|
||||
vars.put("y", i + 1);
|
||||
se.executable(compiledScript, vars).run();
|
||||
}
|
||||
|
||||
final long ITER = 100000;
|
||||
|
||||
StopWatch stopWatch = new StopWatch().start();
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
se.executable(compiledScript, vars).run();
|
||||
}
|
||||
System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime());
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
ExecutableScript executableScript = se.executable(compiledScript, vars);
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
executableScript.run();
|
||||
}
|
||||
System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime());
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
executableScript = se.executable(compiledScript, vars);
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
for (Map.Entry<String, Object> entry : vars.entrySet()) {
|
||||
executableScript.setNextVar(entry.getKey(), entry.getValue());
|
||||
}
|
||||
executableScript.run();
|
||||
}
|
||||
System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime());
|
||||
}
|
||||
}
|
|
@ -28,8 +28,8 @@ dependencies {
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.script.inline', 'true'
|
||||
systemProperty 'es.script.indexed', 'true'
|
||||
setting 'script.inline', 'true'
|
||||
setting 'script.indexed', 'true'
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,76 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script.python;
|
||||
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.script.CompiledScript;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SimpleBench {
|
||||
|
||||
public static void main(String[] args) {
|
||||
PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS);
|
||||
Object compiled = se.compile("x + y", Collections.emptyMap());
|
||||
CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "SimpleBench", "python", compiled);
|
||||
|
||||
|
||||
Map<String, Object> vars = new HashMap<String, Object>();
|
||||
// warm up
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
vars.put("x", i);
|
||||
vars.put("y", i + 1);
|
||||
se.executable(compiledScript, vars).run();
|
||||
}
|
||||
|
||||
final long ITER = 100000;
|
||||
|
||||
StopWatch stopWatch = new StopWatch().start();
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
se.executable(compiledScript, vars).run();
|
||||
}
|
||||
System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime());
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
ExecutableScript executableScript = se.executable(compiledScript, vars);
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
executableScript.run();
|
||||
}
|
||||
System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime());
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
executableScript = se.executable(compiledScript, vars);
|
||||
for (long i = 0; i < ITER; i++) {
|
||||
for (Map.Entry<String, Object> entry : vars.entrySet()) {
|
||||
executableScript.setNextVar(entry.getKey(), entry.getValue());
|
||||
}
|
||||
executableScript.run();
|
||||
}
|
||||
System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime());
|
||||
}
|
||||
}
|
|
@ -17,16 +17,18 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.smoketest;
|
||||
package org.elasticsearch.backwards;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Name;
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
|
||||
import org.apache.lucene.util.TimeUnits;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.RestTestCandidate;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell
|
||||
public class MultiNodeBackwardsIT extends ESRestTestCase {
|
||||
|
||||
public MultiNodeBackwardsIT(RestTestCandidate testCandidate) {
|
||||
|
|
|
@ -1,164 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import joptsimple.OptionException;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cli.Command;
|
||||
import org.elasticsearch.cli.CommandTestCase;
|
||||
import org.elasticsearch.cli.UserError;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
@SuppressForbidden(reason = "modifies system properties intentionally")
|
||||
public class BootstrapCliParserTests extends CommandTestCase {
|
||||
|
||||
@Override
|
||||
protected Command newCommand() {
|
||||
return new BootstrapCliParser();
|
||||
}
|
||||
|
||||
private List<String> propertiesToClear = new ArrayList<>();
|
||||
private Map<Object, Object> properties;
|
||||
|
||||
@Before
|
||||
public void before() {
|
||||
this.properties = new HashMap<>(System.getProperties());
|
||||
}
|
||||
|
||||
@After
|
||||
public void clearProperties() {
|
||||
for (String property : propertiesToClear) {
|
||||
System.clearProperty(property);
|
||||
}
|
||||
propertiesToClear.clear();
|
||||
assertEquals("properties leaked", properties, new HashMap<>(System.getProperties()));
|
||||
}
|
||||
|
||||
void assertShouldRun(boolean shouldRun) {
|
||||
BootstrapCliParser parser = (BootstrapCliParser)command;
|
||||
assertEquals(shouldRun, parser.shouldRun());
|
||||
}
|
||||
|
||||
public void testVersion() throws Exception {
|
||||
String output = execute("-V");
|
||||
assertTrue(output, output.contains(Version.CURRENT.toString()));
|
||||
assertTrue(output, output.contains(Build.CURRENT.shortHash()));
|
||||
assertTrue(output, output.contains(Build.CURRENT.date()));
|
||||
assertTrue(output, output.contains(JvmInfo.jvmInfo().version()));
|
||||
assertShouldRun(false);
|
||||
|
||||
terminal.reset();
|
||||
output = execute("--version");
|
||||
assertTrue(output, output.contains(Version.CURRENT.toString()));
|
||||
assertTrue(output, output.contains(Build.CURRENT.shortHash()));
|
||||
assertTrue(output, output.contains(Build.CURRENT.date()));
|
||||
assertTrue(output, output.contains(JvmInfo.jvmInfo().version()));
|
||||
assertShouldRun(false);
|
||||
}
|
||||
|
||||
public void testPidfile() throws Exception {
|
||||
registerProperties("es.pidfile");
|
||||
|
||||
// missing argument
|
||||
OptionException e = expectThrows(OptionException.class, () -> {
|
||||
execute("-p");
|
||||
});
|
||||
assertEquals("Option p/pidfile requires an argument", e.getMessage());
|
||||
assertShouldRun(false);
|
||||
|
||||
// good cases
|
||||
terminal.reset();
|
||||
execute("--pidfile", "/tmp/pid");
|
||||
assertSystemProperty("es.pidfile", "/tmp/pid");
|
||||
assertShouldRun(true);
|
||||
|
||||
System.clearProperty("es.pidfile");
|
||||
terminal.reset();
|
||||
execute("-p", "/tmp/pid");
|
||||
assertSystemProperty("es.pidfile", "/tmp/pid");
|
||||
assertShouldRun(true);
|
||||
}
|
||||
|
||||
public void testNoDaemonize() throws Exception {
|
||||
registerProperties("es.foreground");
|
||||
|
||||
execute();
|
||||
assertSystemProperty("es.foreground", null);
|
||||
assertShouldRun(true);
|
||||
}
|
||||
|
||||
public void testDaemonize() throws Exception {
|
||||
registerProperties("es.foreground");
|
||||
|
||||
execute("-d");
|
||||
assertSystemProperty("es.foreground", "false");
|
||||
assertShouldRun(true);
|
||||
|
||||
System.clearProperty("es.foreground");
|
||||
execute("--daemonize");
|
||||
assertSystemProperty("es.foreground", "false");
|
||||
assertShouldRun(true);
|
||||
}
|
||||
|
||||
public void testConfig() throws Exception {
|
||||
registerProperties("es.foo", "es.spam");
|
||||
|
||||
execute("-Dfoo=bar", "-Dspam=eggs");
|
||||
assertSystemProperty("es.foo", "bar");
|
||||
assertSystemProperty("es.spam", "eggs");
|
||||
assertShouldRun(true);
|
||||
}
|
||||
|
||||
public void testConfigMalformed() throws Exception {
|
||||
UserError e = expectThrows(UserError.class, () -> {
|
||||
execute("-Dfoo");
|
||||
});
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("Malformed elasticsearch setting"));
|
||||
}
|
||||
|
||||
public void testUnknownOption() throws Exception {
|
||||
OptionException e = expectThrows(OptionException.class, () -> {
|
||||
execute("--network.host");
|
||||
});
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("network.host is not a recognized option"));
|
||||
}
|
||||
|
||||
private void registerProperties(String ... systemProperties) {
|
||||
propertiesToClear.addAll(Arrays.asList(systemProperties));
|
||||
}
|
||||
|
||||
private void assertSystemProperty(String name, String expectedValue) throws Exception {
|
||||
String msg = String.format(Locale.ROOT, "Expected property %s to be %s, terminal output was %s", name, expectedValue, terminal.getOutput());
|
||||
assertThat(msg, System.getProperty(name), is(expectedValue));
|
||||
}
|
||||
}
|
|
@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test'
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.node.ingest', 'false'
|
||||
setting 'node.ingest', 'false'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,6 @@ apply plugin: 'elasticsearch.rest-test'
|
|||
|
||||
integTest {
|
||||
cluster {
|
||||
systemProperty 'es.script.inline', 'true'
|
||||
setting 'script.inline', 'true'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -303,7 +303,7 @@ run_elasticsearch_service() {
|
|||
# This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf
|
||||
[ -f /usr/share/java/jayatanaag.jar ] && export JAVA_TOOL_OPTIONS="-javaagent:/usr/share/java/jayatanaag.jar"
|
||||
# And now we can start Elasticsearch normally, in the background (-d) and with a pidfile (-p).
|
||||
$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid -Des.path.conf=$CONF_DIR $commandLineArgs
|
||||
$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid -Ees.path.conf=$CONF_DIR $commandLineArgs
|
||||
BASH
|
||||
[ "$status" -eq "$expectedStatus" ]
|
||||
elif is_systemd; then
|
||||
|
|
|
@ -102,7 +102,7 @@ fi
|
|||
echo "CONF_FILE=$CONF_FILE" >> /etc/sysconfig/elasticsearch;
|
||||
fi
|
||||
|
||||
run_elasticsearch_service 1 -Des.default.config="$CONF_FILE"
|
||||
run_elasticsearch_service 1 -Ees.default.config="$CONF_FILE"
|
||||
|
||||
# remove settings again otherwise cleaning up before next testrun will fail
|
||||
if is_dpkg ; then
|
||||
|
@ -408,7 +408,7 @@ fi
|
|||
remove_jvm_example
|
||||
|
||||
local relativePath=${1:-$(readlink -m jvm-example-*.zip)}
|
||||
sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Des.logger.level=DEBUG > /tmp/plugin-cli-output
|
||||
sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" -Ees.logger.level=DEBUG > /tmp/plugin-cli-output
|
||||
local loglines=$(cat /tmp/plugin-cli-output | wc -l)
|
||||
if [ "$GROUP" == "TAR PLUGINS" ]; then
|
||||
[ "$loglines" -gt "7" ] || {
|
||||
|
|
|
@ -29,7 +29,6 @@ subprojects {
|
|||
|
||||
// the main files are actually test files, so use the appropriate forbidden api sigs
|
||||
forbiddenApisMain {
|
||||
bundledSignatures = ['jdk-unsafe', 'jdk-deprecated']
|
||||
signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
|
||||
PrecommitTasks.getResource('/forbidden/es-signatures.txt'),
|
||||
PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')]
|
||||
|
|
|
@ -38,7 +38,6 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes'
|
|||
|
||||
// the main files are actually test files, so use the appropriate forbidden api sigs
|
||||
forbiddenApisMain {
|
||||
bundledSignatures = ['jdk-unsafe', 'jdk-deprecated']
|
||||
signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'),
|
||||
PrecommitTasks.getResource('/forbidden/test-signatures.txt')]
|
||||
}
|
||||
|
|
|
@@ -1,65 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.cli;

import java.io.IOException;

import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
import org.junit.After;
import org.junit.Before;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.isEmptyString;
import static org.hamcrest.Matchers.not;

public abstract class CliToolTestCase extends ESTestCase {

    @Before
    @SuppressForbidden(reason = "sets es.default.path.home during tests")
    public void setPathHome() {
        System.setProperty("es.default.path.home", createTempDir().toString());
    }

    @After
    @SuppressForbidden(reason = "clears es.default.path.home during tests")
    public void clearPathHome() {
        System.clearProperty("es.default.path.home");
    }

    public static String[] args(String command) {
        if (!Strings.hasLength(command)) {
            return Strings.EMPTY_ARRAY;
        }
        return command.split("\\s+");
    }

    public static void assertTerminalOutputContainsHelpFile(MockTerminal terminal, String classPath) throws IOException {
        String output = terminal.getOutput();
        assertThat(output, not(isEmptyString()));
        String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath);
        // convert to *nix newlines as MockTerminal used for tests also uses *nix newlines
        expectedDocs = expectedDocs.replace("\r\n", "\n");
        assertThat(output, containsString(expectedDocs));
    }
}
@@ -38,8 +38,6 @@ import org.objectweb.asm.tree.analysis.BasicInterpreter;
import org.objectweb.asm.tree.analysis.BasicValue;
import org.objectweb.asm.tree.analysis.Frame;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileVisitResult;
@@ -59,6 +57,7 @@ public class ESLoggerUsageChecker {
    public static final List<String> LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error");
    public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks";

    @SuppressForbidden(reason = "command line tool")
    public static void main(String... args) throws Exception {
        System.out.println("checking for wrong usages of ESLogger...");
        boolean[] wrongUsageFound = new boolean[1];
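For context, a hedged sketch of the kind of mistake a checker like the one above looks for: a mismatch between '{}' placeholders and arguments in a logging call (an assumption based on the class and method names, not taken from this diff; the log method below is only a stand-in so the snippet compiles on its own):

    import java.util.Arrays;

    public class LoggerUsageSketch {
        public static void main(String[] args) {
            long docCount = 42, tookInMillis = 7;
            // wrong: two '{}' placeholders but only one argument -- the sort of call a checker should flag
            log("indexed {} documents in {} ms", docCount);
            // right: placeholder count matches the argument count
            log("indexed {} documents in {} ms", docCount, tookInMillis);
        }

        // stand-in for a logger's info(String, Object...) method
        static void log(String format, Object... params) {
            System.out.println(format + " " + Arrays.toString(params));
        }
    }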
@@ -17,23 +17,19 @@
 * under the License.
 */

package org.elasticsearch.search.sort;
package org.elasticsearch.test.loggerusage;

import org.elasticsearch.index.query.QueryParseContext;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import java.io.IOException;

// TODO once sort refactoring is done this needs to be merged into SortBuilder
public interface SortElementParserTemp<T extends SortBuilder> {
    /**
     * Creates a new SortBuilder from the json held by the {@link SortElementParserTemp}
     * in {@link org.elasticsearch.common.xcontent.XContent} format
     *
     * @param context
     *            the input parse context. The state on the parser contained in
     *            this context will be changed as a side effect of this method
     *            call
     * @return the new item
     */
    T fromXContent(QueryParseContext context, String elementName) throws IOException;
/**
 * Annotation to suppress forbidden-apis errors inside a whole class, a method, or a field.
 * Duplicated from core as main sources of logger-usage project have no dependencies on core
 */
@Retention(RetentionPolicy.CLASS)
@Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE })
public @interface SuppressForbidden {
    String reason();
}
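A minimal usage sketch of the annotation added above (the class name is made up; the pattern follows the @SuppressForbidden use in the ESLoggerUsageChecker hunk earlier):

    package org.elasticsearch.test.loggerusage;

    public class SuppressForbiddenUsageSketch {
        @SuppressForbidden(reason = "command line tool prints its report to stdout")
        public static void main(String[] args) {
            System.out.println("allowed here even with the jdk-system-out signatures enabled");
        }
    }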