Merge branch 'master' into tests/switch_to_random_value_other_than_for_sort
This commit is contained in: commit a8bf75983f
@@ -76,7 +76,31 @@ Contributing to the Elasticsearch codebase

**Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch)

Make sure you have [Gradle](http://gradle.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE: `gradle eclipse` then `File: Import: Existing Projects into Workspace`. Select the option `Search for nested projects`. Additionally you will want to ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini` accordingly to avoid GC overhead errors.
Make sure you have [Gradle](http://gradle.org) installed, as
Elasticsearch uses it as its build system.

Eclipse users can automatically configure their IDE: `gradle eclipse`
then `File: Import: Existing Projects into Workspace`. Select the
option `Search for nested projects`. Additionally you will want to
ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini`
accordingly to avoid GC overhead errors.

IntelliJ users can automatically configure their IDE: `gradle idea`
then `File->New Project From Existing Sources`. Point to the root of
the source directory, select
`Import project from external model->Gradle`, enable
`Use auto-import`.

The Elasticsearch codebase makes heavy use of Java `assert`s and the
test runner requires that assertions be enabled within the JVM. This
can be accomplished by passing the flag `-ea` to the JVM on startup.

For IntelliJ, go to
`Run->Edit Configurations...->Defaults->JUnit->VM options` and input
`-ea`.

For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to
`VM Arguments`.
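A quick way to confirm that the flag is actually in effect is a standard `-ea` probe (a minimal sketch, not part of the codebase; `AssertDemo` is a hypothetical class):

```java
// Hypothetical probe, not part of Elasticsearch: an assert body only runs
// when the JVM is started with -ea, so without the flag the assignment
// below is skipped and the printed value stays false.
public class AssertDemo {
    public static void main(String[] args) {
        boolean[] enabled = {false};
        assert enabled[0] = true; // intentional assignment, evaluated only with -ea
        System.out.println("assertions enabled: " + enabled[0]);
    }
}
```

Running `java -ea AssertDemo` prints `true`; without the flag it prints `false`.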

Please follow these formatting guidelines:

@@ -41,7 +41,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

/**
 * We enforce limits once any network host is configured. In this case we assume the node is running in production
@@ -63,40 +62,75 @@ final class BootstrapCheck {
     * @param boundTransportAddress the node network bindings
     */
    static void check(final Settings settings, final BoundTransportAddress boundTransportAddress) {
        check(enforceLimits(boundTransportAddress), checks(settings), Node.NODE_NAME_SETTING.get(settings));
        check(
            enforceLimits(boundTransportAddress),
            BootstrapSettings.IGNORE_SYSTEM_BOOTSTRAP_CHECKS.get(settings),
            checks(settings),
            Node.NODE_NAME_SETTING.get(settings));
    }

    /**
     * executes the provided checks and fails the node if
     * enforceLimits is true, otherwise logs warnings
     *
     * @param enforceLimits true if the checks should be enforced or
     *                      warned
     * @param checks the checks to execute
     * @param nodeName the node name to be used as a logging prefix
     * @param enforceLimits true if the checks should be enforced or
     *                      otherwise warned
     * @param ignoreSystemChecks true if system checks should be enforced
     *                           or otherwise warned
     * @param checks the checks to execute
     * @param nodeName the node name to be used as a logging prefix
     */
    // visible for testing
    static void check(final boolean enforceLimits, final List<Check> checks, final String nodeName) {
        final ESLogger logger = Loggers.getLogger(BootstrapCheck.class, nodeName);
    static void check(final boolean enforceLimits, final boolean ignoreSystemChecks, final List<Check> checks, final String nodeName) {
        check(enforceLimits, ignoreSystemChecks, checks, Loggers.getLogger(BootstrapCheck.class, nodeName));
    }

        final List<String> errors =
                checks.stream()
                        .filter(BootstrapCheck.Check::check)
                        .map(BootstrapCheck.Check::errorMessage)
                        .collect(Collectors.toList());
    /**
     * executes the provided checks and fails the node if
     * enforceLimits is true, otherwise logs warnings
     *
     * @param enforceLimits true if the checks should be enforced or
     *                      otherwise warned
     * @param ignoreSystemChecks true if system checks should be enforced
     *                           or otherwise warned
     * @param checks the checks to execute
     * @param logger the logger to log to
     */
    static void check(
            final boolean enforceLimits,
            final boolean ignoreSystemChecks,
            final List<Check> checks,
            final ESLogger logger) {
        final List<String> errors = new ArrayList<>();
        final List<String> ignoredErrors = new ArrayList<>();

        for (final Check check : checks) {
            if (check.check()) {
                if (!enforceLimits || (check.isSystemCheck() && ignoreSystemChecks)) {
                    ignoredErrors.add(check.errorMessage());
                } else {
                    errors.add(check.errorMessage());
                }
            }
        }

        if (!ignoredErrors.isEmpty()) {
            ignoredErrors.forEach(error -> log(logger, error));
        }

        if (!errors.isEmpty()) {
            final List<String> messages = new ArrayList<>(1 + errors.size());
            messages.add("bootstrap checks failed");
            messages.addAll(errors);
            if (enforceLimits) {
                final RuntimeException re = new RuntimeException(String.join("\n", messages));
                errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed);
                throw re;
            } else {
                messages.forEach(message -> logger.warn(message));
            }
            final RuntimeException re = new RuntimeException(String.join("\n", messages));
            errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed);
            throw re;
        }

    }

    static void log(final ESLogger logger, final String error) {
        logger.warn(error);
    }

    /**
@@ -151,6 +185,14 @@ final class BootstrapCheck {
         */
        String errorMessage();

        /**
         * test if the check is a system-level check
         *
         * @return true if the check is a system-level check as opposed
         *         to an Elasticsearch-level check
         */
        boolean isSystemCheck();

    }

    static class HeapSizeCheck implements BootstrapCheck.Check {
@@ -183,6 +225,11 @@
            return JvmInfo.jvmInfo().getConfiguredMaxHeapSize();
        }

        @Override
        public final boolean isSystemCheck() {
            return false;
        }

    }

    static class OsXFileDescriptorCheck extends FileDescriptorCheck {
@@ -233,6 +280,11 @@
            return ProcessProbe.getInstance().getMaxFileDescriptorCount();
        }

        @Override
        public final boolean isSystemCheck() {
            return true;
        }

    }

    // visible for testing
@@ -259,6 +311,11 @@
            return Natives.isMemoryLocked();
        }

        @Override
        public final boolean isSystemCheck() {
            return true;
        }

    }

    static class MinMasterNodesCheck implements Check {
@@ -279,6 +336,12 @@
            return "please set [" + ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() +
                "] to a majority of the number of master eligible nodes in your cluster.";
        }

        @Override
        public final boolean isSystemCheck() {
            return false;
        }

    }

    static class MaxNumberOfThreadsCheck implements Check {
@@ -305,6 +368,11 @@
            return JNANatives.MAX_NUMBER_OF_THREADS;
        }

        @Override
        public final boolean isSystemCheck() {
            return true;
        }

    }

    static class MaxSizeVirtualMemoryCheck implements Check {
@@ -333,6 +401,11 @@
            return JNANatives.MAX_SIZE_VIRTUAL_MEMORY;
        }

        @Override
        public final boolean isSystemCheck() {
            return true;
        }

    }

    static class MaxMapCountCheck implements Check {
@@ -396,6 +469,11 @@
            return Long.parseLong(procSysVmMaxMapCount);
        }

        @Override
        public final boolean isSystemCheck() {
            return true;
        }

    }

}

@@ -37,5 +37,7 @@ public final class BootstrapSettings {
        Setting.boolSetting("bootstrap.seccomp", true, Property.NodeScope);
    public static final Setting<Boolean> CTRLHANDLER_SETTING =
        Setting.boolSetting("bootstrap.ctrlhandler", true, Property.NodeScope);
    public static final Setting<Boolean> IGNORE_SYSTEM_BOOTSTRAP_CHECKS =
        Setting.boolSetting("bootstrap.ignore_system_bootstrap_checks", false, Property.NodeScope);

}

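Read together with `BootstrapCheck.check` above, the new setting means a failed check is fatal only when limits are enforced and the check is not an ignorable system check. A condensed restatement of that decision (a sketch distilled from the diff, not a verbatim excerpt):

```java
// Distilled from BootstrapCheck.check above: a failed check is merely
// logged when limits are not enforced, or when it is a system-level check
// and bootstrap.ignore_system_bootstrap_checks is set to true.
static boolean isFatal(final boolean enforceLimits,
                       final boolean ignoreSystemChecks,
                       final BootstrapCheck.Check check) {
    if (check.check() == false) {
        return false; // the check passed, nothing to report
    }
    return enforceLimits && (check.isSystemCheck() && ignoreSystemChecks) == false;
}
```
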
@@ -139,15 +139,9 @@ public class AliasValidator extends AbstractComponent {
        }
    }

    private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
        try {
            queryShardContext.reset();
            QueryParseContext queryParseContext = queryShardContext.newParseContext(parser);
            QueryBuilder<?> queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext);
            queryBuilder.toFilter(queryShardContext);
        } finally {
            queryShardContext.reset();
            parser.close();
        }
    private static void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
        QueryParseContext queryParseContext = queryShardContext.newParseContext(parser);
        QueryBuilder<?> queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext);
        queryBuilder.toFilter(queryShardContext);
    }
}

@@ -408,6 +408,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
    BootstrapSettings.MLOCKALL_SETTING,
    BootstrapSettings.SECCOMP_SETTING,
    BootstrapSettings.CTRLHANDLER_SETTING,
    BootstrapSettings.IGNORE_SYSTEM_BOOTSTRAP_CHECKS,
    IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING,
    IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING,
    IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING,

@@ -50,9 +50,11 @@ public final class HttpTransportSettings {
    public static final Setting<Integer> SETTING_PIPELINING_MAX_EVENTS =
        Setting.intSetting("http.pipelining.max_events", 10000, Property.NodeScope);
    public static final Setting<Boolean> SETTING_HTTP_COMPRESSION =
        Setting.boolSetting("http.compression", false, Property.NodeScope);
        Setting.boolSetting("http.compression", true, Property.NodeScope);
    // we intentionally use a different compression level than Netty's default here as our benchmarks have shown that a compression level of 3 is the
    // best compromise between reduction in network traffic and added latency. For more details please check #7309.
    public static final Setting<Integer> SETTING_HTTP_COMPRESSION_LEVEL =
        Setting.intSetting("http.compression_level", 6, Property.NodeScope);
        Setting.intSetting("http.compression_level", 3, Property.NodeScope);
    public static final Setting<List<String>> SETTING_HTTP_HOST =
        listSetting("http.host", emptyList(), Function.identity(), Property.NodeScope);
    public static final Setting<List<String>> SETTING_HTTP_PUBLISH_HOST =

@@ -1,51 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.http.netty;

import org.elasticsearch.transport.TransportException;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.handler.codec.embedder.DecoderEmbedder;
import org.jboss.netty.handler.codec.http.HttpContentDecompressor;
import org.jboss.netty.handler.codec.http.HttpHeaders;

public class ESHttpContentDecompressor extends HttpContentDecompressor {
    private final boolean compression;

    public ESHttpContentDecompressor(boolean compression) {
        super();
        this.compression = compression;
    }

    @Override
    protected DecoderEmbedder<ChannelBuffer> newContentDecoder(String contentEncoding) throws Exception {
        if (compression) {
            // compression is enabled so handle the request according to the headers (compressed and uncompressed)
            return super.newContentDecoder(contentEncoding);
        } else {
            // if compression is disabled only allow "identity" (uncompressed) requests
            if (HttpHeaders.Values.IDENTITY.equals(contentEncoding)) {
                // nothing to handle here
                return null;
            } else {
                throw new TransportException("Support for compressed content is disabled. You can enable it with http.compression=true");
            }
        }
    }
}

@@ -70,6 +70,7 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
import org.jboss.netty.handler.codec.http.HttpContentCompressor;
import org.jboss.netty.handler.codec.http.HttpContentDecompressor;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.timeout.ReadTimeoutException;
@@ -544,19 +545,19 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
            requestDecoder.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents);
        }
        pipeline.addLast("decoder", requestDecoder);
        pipeline.addLast("decoder_compress", new ESHttpContentDecompressor(transport.compression));
        pipeline.addLast("decoder_compress", new HttpContentDecompressor());
        HttpChunkAggregator httpChunkAggregator = new HttpChunkAggregator((int) transport.maxContentLength.bytes());
        if (transport.maxCompositeBufferComponents != -1) {
            httpChunkAggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents);
        }
        pipeline.addLast("aggregator", httpChunkAggregator);
        if (SETTING_CORS_ENABLED.get(transport.settings())) {
            pipeline.addLast("cors", new CorsHandler(transport.getCorsConfig()));
        }
        pipeline.addLast("encoder", new ESHttpResponseEncoder());
        if (transport.compression) {
            pipeline.addLast("encoder_compress", new HttpContentCompressor(transport.compressionLevel));
        }
        if (SETTING_CORS_ENABLED.get(transport.settings())) {
            pipeline.addLast("cors", new CorsHandler(transport.getCorsConfig()));
        }
        if (transport.pipelining) {
            pipeline.addLast("pipelining", new HttpPipeliningHandler(transport.pipeliningMaxEvents));
        }

@@ -382,7 +382,7 @@ public final class IndexSettings {
     */
    synchronized boolean updateIndexMetaData(IndexMetaData indexMetaData) {
        final Settings newSettings = indexMetaData.getSettings();
        if (Version.indexCreated(newSettings) != version) {
        if (version.equals(Version.indexCreated(newSettings)) == false) {
            throw new IllegalArgumentException("version mismatch on settings update expected: " + version + " but was: " + Version.indexCreated(newSettings));
        }
        final String newUUID = newSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);

@@ -29,7 +29,6 @@ import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
@@ -204,6 +203,9 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
    public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
        if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) {
            final Object index = node.get("index");
            if (Arrays.asList(null, "no", "not_analyzed", "analyzed").contains(index) == false) {
                throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]");
            }
            final boolean keyword = index != null && "analyzed".equals(index) == false;

            // Automatically upgrade simple mappings for ease of upgrade, otherwise fail
@@ -283,7 +285,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
                    node.put("index", false);
                    break;
                default:
                    throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
                    throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]");
            }
        }
        final Object fielddataObject = node.get("fielddata");

@@ -207,7 +207,6 @@ public class PercolatorFieldMapper extends FieldMapper {
    }

    static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder<?> queryBuilder) throws IOException {
        context.reset();
        // This means that fields in the query need to exist in the mapping prior to registering this query
        // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired.
        //
@@ -222,11 +221,7 @@ public class PercolatorFieldMapper extends FieldMapper {
        // as an analyzed string.
        context.setAllowUnmappedFields(false);
        context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
        try {
            return queryBuilder.toQuery(context);
        } finally {
            context.reset();
        }
        return queryBuilder.toQuery(context);
    }

    static QueryBuilder<?> parseQueryBuilder(QueryParseContext context, XContentLocation location) {

@@ -109,12 +109,12 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
    @Override
    public final Query toFilter(QueryShardContext context) throws IOException {
        Query result = null;
        final boolean originalIsFilter = context.isFilter;
        final boolean originalIsFilter = context.isFilter();
        try {
            context.isFilter = true;
            context.setIsFilter(true);
            result = toQuery(context);
        } finally {
            context.isFilter = originalIsFilter;
            context.setIsFilter(originalIsFilter);
        }
        return result;
    }

@@ -91,7 +91,7 @@ public class QueryShardContext extends QueryRewriteContext {
    private boolean allowUnmappedFields;
    private boolean mapUnmappedFieldAsString;
    private NestedScope nestedScope;
    boolean isFilter; // pkg private for testing
    private boolean isFilter;

    public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService,
            MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
@@ -116,7 +116,7 @@ public class QueryShardContext extends QueryRewriteContext {
        this.types = source.getTypes();
    }

    public void reset() {
    private void reset() {
        allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
        this.lookup = null;
        this.namedQueries.clear();
@@ -183,6 +183,10 @@ public class QueryShardContext extends QueryRewriteContext {
        return isFilter;
    }

    void setIsFilter(boolean isFilter) {
        this.isFilter = isFilter;
    }

    public Collection<String> simpleMatchToIndexNames(String pattern) {
        return mapperService.simpleMatchToIndexNames(pattern);
    }
@@ -369,5 +373,4 @@ public class QueryShardContext extends QueryRewriteContext {
    public final Index index() {
        return indexSettings.getIndex();
    }

}

@@ -680,6 +680,7 @@ public class IndexShard extends AbstractIndexShardComponent {
        CompletionStats completionStats = new CompletionStats();
        try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) {
            completionStats.add(CompletionFieldStats.completionStats(currentSearcher.reader(), fields));
            // Necessary for 2.x shards:
            Completion090PostingsFormat postingsFormat = ((Completion090PostingsFormat)
                PostingsFormat.forName(Completion090PostingsFormat.CODEC_NAME));
            completionStats.add(postingsFormat.completionStats(currentSearcher.reader(), fields));

@@ -51,6 +51,8 @@ import static java.nio.charset.StandardCharsets.UTF_8;
public final class IngestDocument {

    public final static String INGEST_KEY = "_ingest";
    private static final String INGEST_KEY_PREFIX = INGEST_KEY + ".";
    private static final String SOURCE_PREFIX = SourceFieldMapper.NAME + ".";

    static final String TIMESTAMP = "timestamp";

@@ -600,6 +602,7 @@ public final class IngestDocument {
    }

    private class FieldPath {

        private final String[] pathElements;
        private final Object initialContext;

@@ -608,13 +611,13 @@ public final class IngestDocument {
                throw new IllegalArgumentException("path cannot be null nor empty");
            }
            String newPath;
            if (path.startsWith(INGEST_KEY + ".")) {
            if (path.startsWith(INGEST_KEY_PREFIX)) {
                initialContext = ingestMetadata;
                newPath = path.substring(8, path.length());
                newPath = path.substring(INGEST_KEY_PREFIX.length(), path.length());
            } else {
                initialContext = sourceAndMetadata;
                if (path.startsWith(SourceFieldMapper.NAME + ".")) {
                    newPath = path.substring(8, path.length());
                if (path.startsWith(SOURCE_PREFIX)) {
                    newPath = path.substring(SOURCE_PREFIX.length(), path.length());
                } else {
                    newPath = path;
                }
@@ -624,5 +627,6 @@ public final class IngestDocument {
                throw new IllegalArgumentException("path [" + path + "] is not valid");
            }
        }

    }
}

@@ -20,6 +20,7 @@
package org.elasticsearch.bootstrap;

import org.apache.lucene.util.Constants;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@@ -38,6 +39,8 @@ import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class BootstrapCheckTests extends ESTestCase {
@@ -113,6 +116,11 @@ public class BootstrapCheckTests extends ESTestCase {
                    public String errorMessage() {
                        return "first";
                    }

                    @Override
                    public boolean isSystemCheck() {
                        return false;
                    }
                },
                new BootstrapCheck.Check() {
                    @Override
@@ -124,11 +132,16 @@ public class BootstrapCheckTests extends ESTestCase {
                    public String errorMessage() {
                        return "second";
                    }

                    @Override
                    public boolean isSystemCheck() {
                        return false;
                    }
                }
        );

        final RuntimeException e =
                expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, checks, "testExceptionAggregation"));
                expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, false, checks, "testExceptionAggregation"));
        assertThat(e, hasToString(allOf(containsString("bootstrap checks failed"), containsString("first"), containsString("second"))));
        final Throwable[] suppressed = e.getSuppressed();
        assertThat(suppressed.length, equalTo(2));
@@ -159,7 +172,7 @@ public class BootstrapCheckTests extends ESTestCase {
        final RuntimeException e =
                expectThrows(
                        RuntimeException.class,
                        () -> BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck"));
                        () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck"));
        assertThat(
                e.getMessage(),
                containsString("initial heap size [" + initialHeapSize.get() + "] " +
@@ -167,7 +180,7 @@ public class BootstrapCheckTests extends ESTestCase {

        initialHeapSize.set(maxHeapSize.get());

        BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck");

        // nothing should happen if the initial heap size or the max
        // heap size is not available
@@ -176,7 +189,7 @@ public class BootstrapCheckTests extends ESTestCase {
        } else {
            maxHeapSize.set(0);
        }
        BootstrapCheck.check(true, Collections.singletonList(check), "testHeapSizeCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testHeapSizeCheck");
    }

    public void testFileDescriptorLimits() {
@@ -202,17 +215,17 @@ public class BootstrapCheckTests extends ESTestCase {

        final RuntimeException e =
                expectThrows(RuntimeException.class,
                        () -> BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits"));
                        () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits"));
        assertThat(e.getMessage(), containsString("max file descriptors"));

        maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

        BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits");

        // nothing should happen if current file descriptor count is
        // not available
        maxFileDescriptorCount.set(-1);
        BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimits");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimits");
    }

    public void testFileDescriptorLimitsThrowsOnInvalidLimit() {
@@ -255,13 +268,17 @@ public class BootstrapCheckTests extends ESTestCase {
            if (testCase.shouldFail) {
                final RuntimeException e = expectThrows(
                        RuntimeException.class,
                        () -> BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit"));
                        () -> BootstrapCheck.check(
                                true,
                                false,
                                Collections.singletonList(check),
                                "testFileDescriptorLimitsThrowsOnInvalidLimit"));
                assertThat(
                        e.getMessage(),
                        containsString("memory locking requested for elasticsearch process but memory is not locked"));
            } else {
                // nothing should happen
                BootstrapCheck.check(true, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit");
                BootstrapCheck.check(true, false, Collections.singletonList(check), "testFileDescriptorLimitsThrowsOnInvalidLimit");
            }
        }
    }
@@ -278,17 +295,17 @@ public class BootstrapCheckTests extends ESTestCase {

        final RuntimeException e = expectThrows(
                RuntimeException.class,
                () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"));
                () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck"));
        assertThat(e.getMessage(), containsString("max number of threads"));

        maxNumberOfThreads.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");

        // nothing should happen if current max number of threads is
        // not available
        maxNumberOfThreads.set(-1);
        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxNumberOfThreadsCheck");
    }

    public void testMaxSizeVirtualMemory() {
@@ -309,17 +326,17 @@ public class BootstrapCheckTests extends ESTestCase {

        final RuntimeException e = expectThrows(
                RuntimeException.class,
                () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory"));
                () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory"));
        assertThat(e.getMessage(), containsString("max size virtual memory"));

        maxSizeVirtualMemory.set(rlimInfinity);

        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory");

        // nothing should happen if max size virtual memory is not
        // available
        maxSizeVirtualMemory.set(Long.MIN_VALUE);
        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxSizeVirtualMemory");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxSizeVirtualMemory");
    }

    public void testMaxMapCountCheck() {
@@ -334,17 +351,17 @@ public class BootstrapCheckTests extends ESTestCase {

        RuntimeException e = expectThrows(
                RuntimeException.class,
                () -> BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck"));
                () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck"));
        assertThat(e.getMessage(), containsString("max virtual memory areas vm.max_map_count"));

        maxMapCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));

        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck");

        // nothing should happen if current vm.max_map_count is not
        // available
        maxMapCount.set(-1);
        BootstrapCheck.check(true, Collections.singletonList(check), "testMaxMapCountCheck");
        BootstrapCheck.check(true, false, Collections.singletonList(check), "testMaxMapCountCheck");
    }

    public void testMinMasterNodes() {
@@ -353,7 +370,42 @@ public class BootstrapCheckTests extends ESTestCase {
        assertThat(check.check(), not(equalTo(isSet)));
        List<BootstrapCheck.Check> defaultChecks = BootstrapCheck.checks(Settings.EMPTY);

        expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, defaultChecks, "testMinMasterNodes"));
        expectThrows(RuntimeException.class, () -> BootstrapCheck.check(true, false, defaultChecks, "testMinMasterNodes"));
    }

    public void testIgnoringSystemChecks() {
        BootstrapCheck.Check check = new BootstrapCheck.Check() {
            @Override
            public boolean check() {
                return true;
            }

            @Override
            public String errorMessage() {
                return "error";
            }

            @Override
            public boolean isSystemCheck() {
                return true;
            }
        };

        final RuntimeException notIgnored = expectThrows(
                RuntimeException.class,
                () -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testIgnoringSystemChecks"));
        assertThat(notIgnored, hasToString(containsString("error")));

        final ESLogger logger = mock(ESLogger.class);

        // nothing should happen if we ignore system checks
        BootstrapCheck.check(true, true, Collections.singletonList(check), logger);
        verify(logger).warn("error");
        reset(logger);

        // nothing should happen if we ignore all checks
        BootstrapCheck.check(false, randomBoolean(), Collections.singletonList(check), logger);
        verify(logger).warn("error");
    }

}

@@ -0,0 +1,146 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.http.netty;

import org.apache.http.Header;
import org.apache.http.HttpException;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.protocol.HttpContext;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.rest.client.http.HttpResponse;

import java.io.IOException;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 1, numClientNodes = 1)
public class NettyHttpCompressionIT extends ESIntegTestCase {
    private static final String GZIP_ENCODING = "gzip";

    private static final String SAMPLE_DOCUMENT = "{\n" +
        "   \"name\": {\n" +
        "      \"first name\": \"Steve\",\n" +
        "      \"last name\": \"Jobs\"\n" +
        "   }\n" +
        "}";

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            .put(NetworkModule.HTTP_ENABLED.getKey(), true)
            .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
            .build();
    }

    public void testCompressesResponseIfRequested() throws Exception {
        ensureGreen();

        // we need to intercept early, otherwise internal logic in HttpClient will just remove the header and we cannot verify it
        ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor();
        CloseableHttpClient internalClient = HttpClients.custom().addInterceptorFirst(headerExtractor).build();

        HttpResponse response = httpClient(internalClient).path("/").addHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING).execute();
        assertEquals(200, response.getStatusCode());
        assertTrue(headerExtractor.hasContentEncodingHeader());
        assertEquals(GZIP_ENCODING, headerExtractor.getContentEncodingHeader().getValue());
    }

    public void testUncompressedResponseByDefault() throws Exception {
        ensureGreen();

        ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor();
        CloseableHttpClient internalClient = HttpClients
            .custom()
            .disableContentCompression()
            .addInterceptorFirst(headerExtractor)
            .build();

        HttpResponse response = httpClient(internalClient).path("/").execute();
        assertEquals(200, response.getStatusCode());
        assertFalse(headerExtractor.hasContentEncodingHeader());
    }

    public void testCanInterpretUncompressedRequest() throws Exception {
        ensureGreen();

        ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor();
        CloseableHttpClient internalClient = HttpClients
            .custom()
            // this disables content compression in both directions (request and response)
            .disableContentCompression()
            .addInterceptorFirst(headerExtractor)
            .build();

        HttpResponse response = httpClient(internalClient)
            .path("/company/employees/1")
            .method("POST")
            .body(SAMPLE_DOCUMENT)
            .execute();

        assertEquals(201, response.getStatusCode());
        assertFalse(headerExtractor.hasContentEncodingHeader());
    }

    public void testCanInterpretCompressedRequest() throws Exception {
        ensureGreen();

        ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor();
        // we don't call #disableContentCompression() hence the client will send the content compressed
        CloseableHttpClient internalClient = HttpClients.custom().addInterceptorFirst(headerExtractor).build();

        HttpResponse response = httpClient(internalClient)
            .path("/company/employees/2")
            .method("POST")
            .body(SAMPLE_DOCUMENT)
            .execute();

        assertEquals(201, response.getStatusCode());
        assertTrue(headerExtractor.hasContentEncodingHeader());
        assertEquals(GZIP_ENCODING, headerExtractor.getContentEncodingHeader().getValue());
    }

    private static class ContentEncodingHeaderExtractor implements HttpResponseInterceptor {
        private Header contentEncodingHeader;

        @Override
        public void process(org.apache.http.HttpResponse response, HttpContext context) throws HttpException, IOException {
            final Header[] headers = response.getHeaders(HttpHeaders.CONTENT_ENCODING);
            if (headers.length == 1) {
                this.contentEncodingHeader = headers[0];
            } else if (headers.length > 1) {
                throw new AssertionError("Expected none or one content encoding header but got " + headers.length + " headers.");
            }
        }

        public boolean hasContentEncodingHeader() {
            return contentEncodingHeader != null;
        }

        public Header getContentEncodingHeader() {
            return contentEncodingHeader;
        }
    }

}

@@ -110,6 +110,15 @@ public class IndexSettingsTests extends ESTestCase {
            assertTrue(ex.getMessage(), ex.getMessage().startsWith("version mismatch on settings update expected: "));
        }

        // use version number that is unknown
        metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromId(999999))
            .build());
        settings = new IndexSettings(metaData, Settings.EMPTY);
        assertEquals(Version.fromId(999999), settings.getIndexVersionCreated());
        assertEquals("_na_", settings.getUUID());
        settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
            Version.fromId(999999)).put("index.test.setting.int", 42).build()));

        metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build());
        settings = new IndexSettings(metaData, Settings.EMPTY);
@@ -103,6 +103,22 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase {
        assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
    }

    public void testIllegalIndexValue() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("field")
                        .field("type", "string")
                        .field("index", false)
                    .endObject()
                .endObject().endObject().endObject().string();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parser.parse("type", new CompressedXContent(mapping)));
        assertThat(e.getMessage(),
                containsString("Can't parse [index] value [false] for field [field], expected [no], [not_analyzed] or [analyzed]"));
    }

    public void testNotSupportedUpgrade() throws IOException {
        IndexService indexService = createIndex("test");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
@@ -51,7 +51,7 @@ Note that you have to set the cluster name if you use one different than

[source,java]
--------------------------------------------------
Settings settings = Settings.settingsBuilder()
Settings settings = Settings.builder()
        .put("cluster.name", "myClusterName").build();
Client client = TransportClient.builder().settings(settings).build();
//Add transport addresses and do something with the client...

@@ -34,15 +34,21 @@ The response looks like:
    "reason" : "INDEX_CREATED", <2>
    "at" : "2016-03-22T20:04:23.620Z"
  },
  "nodes" : { <3>
  "allocation_delay_ms" : 0, <3>
  "remaining_delay_ms" : 0, <4>
  "nodes" : {
    "V-Spi0AyRZ6ZvKbaI3691w" : {
      "node_name" : "node1",
      "node_attributes" : { <4>
      "node_attributes" : { <5>
        "bar" : "baz"
      },
      "final_decision" : "NO", <5>
      "weight" : 0.06666675, <6>
      "decisions" : [ { <7>
      "store" : {
        "shard_copy" : "NONE" <6>
      },
      "final_decision" : "NO", <7>
      "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
      "weight" : 0.06666675, <8>
      "decisions" : [ { <9>
        "decider" : "filter",
        "decision" : "NO",
        "explanation" : "node does not match index include filters [foo:\"bar\"]"
@@ -54,7 +60,11 @@ The response looks like:
        "bar" : "baz",
        "foo" : "bar"
      },
      "store" : {
        "shard_copy" : "AVAILABLE"
      },
      "final_decision" : "NO",
      "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
      "weight" : -1.3833332,
      "decisions" : [ {
        "decider" : "same_shard",
@@ -65,7 +75,11 @@ The response looks like:
    "PzdyMZGXQdGhqTJHF_hGgA" : {
      "node_name" : "node3",
      "node_attributes" : { },
      "store" : {
        "shard_copy" : "NONE"
      },
      "final_decision" : "NO",
      "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
      "weight" : 2.3166666,
      "decisions" : [ {
        "decider" : "filter",
@@ -78,11 +92,13 @@ The response looks like:
--------------------------------------------------
<1> Whether the shard is assigned or unassigned
<2> Reason for the shard originally becoming unassigned
<3> List of node decisions about the shard
<4> User-added attributes the node has
<5> Final decision for whether the shard is allowed to be allocated to this node
<6> Weight for how much the allocator would like to allocate the shard to this node
<7> List of decisions factoring into final decision
<3> Configured delay before the shard can be allocated
<4> Remaining delay before the shard can be allocated
<5> User-added attributes the node has
<6> The shard copy information for this node and error (if applicable)
<7> Final decision and explanation of whether the shard can be allocated to this node
<8> Weight for how much the allocator would like to allocate the shard to this node
<9> List of node decisions factoring into final decision about the shard

For a shard that is already assigned, the output looks similar to:

@@ -97,13 +113,19 @@ For a shard that is already assigned, the output looks similar to:
  },
  "assigned" : true,
  "assigned_node_id" : "Qc6VL8c5RWaw1qXZ0Rg57g", <1>
  "allocation_delay_ms" : 0,
  "remaining_delay_ms" : 0,
  "nodes" : {
    "V-Spi0AyRZ6ZvKbaI3691w" : {
      "node_name" : "Susan Storm",
      "node_attributes" : {
        "bar" : "baz"
      },
      "store" : {
        "shard_copy" : "NONE"
      },
      "final_decision" : "NO",
      "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
      "weight" : 1.4499999,
      "decisions" : [ {
        "decider" : "filter",
@@ -117,7 +139,11 @@ For a shard that is already assigned, the output looks similar to:
        "bar" : "baz",
        "foo" : "bar"
      },
      "final_decision" : "CURRENTLY_ASSIGNED", <2>
      "store" : {
        "shard_copy" : "AVAILABLE"
      },
      "final_decision" : "ALREADY_ASSIGNED", <2>
      "final_explanation" : "the shard is already assigned to this node",
      "weight" : 0.0,
      "decisions" : [ {
        "decider" : "same_shard",
@@ -128,7 +154,11 @@ For a shard that is already assigned, the output looks similar to:
    "PzdyMZGXQdGhqTJHF_hGgA" : {
      "node_name" : "The Symbiote",
      "node_attributes" : { },
      "store" : {
        "shard_copy" : "NONE"
      },
      "final_decision" : "NO",
      "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
      "weight" : 3.6999998,
      "decisions" : [ {
        "decider" : "filter",
@@ -140,7 +170,7 @@ For a shard that is already assigned, the output looks similar to:
}
--------------------------------------------------
<1> Node the shard is currently assigned to
<2> The decision is "CURRENTLY_ASSIGNED" because the shard is currently assigned to this node
<2> The decision is "ALREADY_ASSIGNED" because the shard is currently assigned to this node

You can also have Elasticsearch explain the allocation of the first unassigned
shard it finds by sending an empty body, such as:

@@ -8,9 +8,8 @@ The store module allows you to control how index data is stored and accessed on
=== File system storage types

There are different file system implementations or _storage types_. The best
one for the operating environment will be automatically chosen: `mmapfs` on
Windows 64bit, `simplefs` on Windows 32bit, and `default` (hybrid `niofs` and
`mmapfs`) for the rest.
one for the operating environment will be automatically chosen: `simplefs` on
Windows 32bit, `niofs` on other 32bit systems and `mmapfs` on 64bit systems.

This can be overridden for all indices by adding this to the
`config/elasticsearch.yml` file:
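
For example, forcing NIO-based storage for all new indices would look like this (an illustrative snippet; `niofs` is one of the storage types documented below):

[source,yaml]
---------------------------------
index.store.type: niofs
---------------------------------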
@@ -61,12 +60,13 @@ process equal to the size of the file being mapped. Before using this
class, be sure you have allowed plenty of
<<vm-max-map-count,virtual address space>>.

[[default_fs]]`default_fs`::
[[default_fs]]`default_fs` deprecated[5.0.0, The `default_fs` store type is deprecated - use `mmapfs` instead]::

The `default` type is a hybrid of NIO FS and MMapFS, which chooses the best
file system for each type of file. Currently only the Lucene term dictionary
and doc values files are memory mapped to reduce the impact on the operating
system. All other files are opened using Lucene `NIOFSDirectory`. Address
space settings (<<vm-max-map-count>>) might also apply if your term
dictionaries are large.
file system for each type of file. Currently only the Lucene term dictionary,
doc values and points files are memory mapped to reduce the impact on the
operating system. All other files are opened using Lucene `NIOFSDirectory`.
Address space settings (<<vm-max-map-count>>) might also apply if your term
dictionaries are large, if you index many fields that use points (numerics, dates
and ip addresses) or if you have many fields with doc values.

@@ -1,7 +1,7 @@
[[elasticsearch-reference]]
= Elasticsearch Reference

:version: 5.0.0-alpha1
:version: 5.0.0-alpha2
:major-version: 5.x
:branch: master
:jdk: 1.8.0_73

@@ -108,3 +108,20 @@ GET my_index/_search
  }
}
--------------------------------------------------

Also beware that colons are special characters to the
<<query-dsl-query-string-query,`query_string`>> query, so ipv6 addresses will
need to be escaped. The easiest way to do so is to put quotes around the
searched value:

[source,js]
--------------------------------------------------
GET t/_search
{
  "query": {
    "query_string" : {
      "query": "ip_addr:\"2001:db8::/48\""
    }
  }
}
--------------------------------------------------

@@ -62,7 +62,7 @@ The following parameters are accepted by `keyword` fields:

<<ignore-above,`ignore_above`>>::

Do not index or analyze any string longer than this value. Defaults to
Do not index any string longer than this value. Defaults to
`2147483647` so that all values would be accepted.

<<include-in-all,`include_in_all`>>::

@@ -36,6 +36,7 @@ way to do this is to upgrade to Elasticsearch 2.3 or later and to use the
* <<breaking_50_index_apis>>
* <<breaking_50_settings_changes>>
* <<breaking_50_allocation>>
* <<breaking_50_http_changes>>
* <<breaking_50_rest_api_changes>>
* <<breaking_50_cat_api>>
* <<breaking_50_java_api_changes>>
@@ -60,6 +61,8 @@ include::migrate_5_0/settings.asciidoc[]

include::migrate_5_0/allocation.asciidoc[]

include::migrate_5_0/http.asciidoc[]

include::migrate_5_0/rest.asciidoc[]

include::migrate_5_0/cat.asciidoc[]

@@ -0,0 +1,9 @@
[[breaking_50_http_changes]]
=== HTTP changes

==== Compressed HTTP requests are always accepted

Before 5.0, Elasticsearch accepted compressed HTTP requests only if the setting
`http.compression` was set to `true`. Elasticsearch now always accepts
compressed requests, but it will send compressed responses only if
`http.compression` is set to `true`.
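
As an illustration (a hypothetical request, not part of the original page), a client opts in to a compressed response via the `Accept-Encoding` header, while compressed request bodies are now always understood:

[source,sh]
--------------------------------------------------
# the response is gzipped only because Accept-Encoding asks for it
curl -H "Accept-Encoding: gzip" --compressed "localhost:9200/_cluster/health"
--------------------------------------------------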
@@ -48,10 +48,10 @@ to `4kb`

|`http.compression` |Support for compression when possible (with
Accept-Encoding). Defaults to `false`.
Accept-Encoding). Defaults to `true`.

|`http.compression_level` |Defines the compression level to use.
Defaults to `6`.
|`http.compression_level` |Defines the compression level to use for HTTP responses. Valid values are in the range of 1 (minimum compression)
and 9 (maximum compression). Defaults to `3`.

|`http.cors.enabled` |Enable or disable cross-origin resource sharing,
i.e. whether a browser on another origin can do requests to

@@ -23,7 +23,7 @@ to specify the language of the script. Plugins are available for following langu
|groovy |no |built-in
|expression |yes |built-in
|mustache |yes |built-in
/painless /yes /built-in (module)
|painless |yes |built-in (module)
|javascript |no |{plugins}/lang-javascript.html[elasticsearch-lang-javascript]
|python |no |{plugins}/lang-python.html[elasticsearch-lang-python]
|=======================================================================
@@ -455,29 +455,98 @@ for details on what operators and functions are available.

Variables in `expression` scripts are available to access:

* Single valued document fields, e.g. `doc['myfield'].value`
* Single valued document fields can also be accessed without `.value` e.g. `doc['myfield']`
* document fields, e.g. `doc['myfield'].value`
* variables and methods that the field supports, e.g. `doc['myfield'].empty`
* Parameters passed into the script, e.g. `mymodifier`
* The current document's score, `_score` (only available when used in a `script_score`)

Variables in `expression` scripts that are of type `date` may use the following member methods:
[float]
=== Expressions API for numeric fields
[cols="<,<",options="header",]
|=======================================================================
|Expression |Description
|`doc['field_name'].value` |The native value of the field. For example,
if it's a short type, it will be short.

* getYear()
* getMonth()
* getDayOfMonth()
* getHourOfDay()
* getMinutes()
* getSeconds()
|`doc['field_name'].empty` |A boolean indicating if the field has no
values within the doc.

|`doc['field_name'].min()` |The minimum value of the field in this document.

|`doc['field_name'].max()` |The maximum value of the field in this document.

|`doc['field_name'].median()` |The median value of the field in this document.

|`doc['field_name'].avg()` |The average of the values in this document.

|`doc['field_name'].sum()` |The sum of the values in this document.

|`doc['field_name'].count()` |The number of values in this document.
|=======================================================================

When a document is missing the field completely, by default the value will be treated as `0`.
You can treat it as another value instead, e.g. `doc['myfield'].empty ? 100 : doc['myfield'].value`

When a document has multiple values for the field, by default the minimum value is returned.
You can choose a different value instead, e.g. `doc['myfield'].sum()`.

When a document is missing the field completely, by default the value will be treated as `0`.

Boolean fields are exposed as numerics, with `true` mapped to `1` and `false` mapped to `0`.
For example: `doc['on_sale'] ? doc['price'] * 0.5 : doc['price']`
|
||||
|
||||
[float]
|
||||
=== Additional methods for date fields
|
||||
Date fields are treated as the number of milliseconds since January 1, 1970 and
|
||||
support the numeric API above, with these additional methods:
|
||||
|
||||
[cols="<,<",options="header",]
|
||||
|=======================================================================
|
||||
|Expression |Description
|
||||
|`doc['field_name'].getYear()` |Year component, e.g. `1970`.
|
||||
|
||||
|`doc['field_name'].getMonth()` |Month component (0-11), e.g. `0` for January.
|
||||
|
||||
|`doc['field_name'].getDayOfMonth()` |Day component, e.g. `1` for the first of the month.
|
||||
|
||||
|`doc['field_name'].getHourOfDay()` |Hour component (0-23)
|
||||
|
||||
|`doc['field_name'].getMinutes()` |Minutes component (0-59)
|
||||
|
||||
|`doc['field_name'].getSeconds()` |Seconds component (0-59)
|
||||
|=======================================================================
|
||||
|
||||
The following example shows the difference in years between the `date` fields date0 and date1:
|
||||
|
||||
`doc['date1'].getYear() - doc['date0'].getYear()`
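For reference, the component extraction above can be reproduced in plain Java; this minimal sketch mirrors what the new `DateMethodValueSource` later in this commit does with `Calendar` (UTC, `Locale.ROOT`), and the sample timestamp is an arbitrary illustration.

[source,java]
--------------------------------------------------
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;

/** Illustrative only: extracting date components from epoch milliseconds. */
public final class DateComponents {
    public static void main(String[] args) {
        long millis = 1_400_000_000_000L; // hypothetical doc['date0'].value
        Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
        calendar.setTimeInMillis(millis);
        System.out.println(calendar.get(Calendar.YEAR));         // getYear()
        System.out.println(calendar.get(Calendar.MONTH));        // getMonth(), 0-11
        System.out.println(calendar.get(Calendar.DAY_OF_MONTH)); // getDayOfMonth()
        System.out.println(calendar.get(Calendar.HOUR_OF_DAY));  // getHourOfDay(), 0-23
    }
}
--------------------------------------------------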

[float]
=== Expressions API for `geo_point` fields
[cols="<,<",options="header",]
|=======================================================================
|Expression |Description
|`doc['field_name'].empty` |A boolean indicating if the field has no
values within the doc.

|`doc['field_name'].lat` |The latitude of the geo point.

|`doc['field_name'].lon` |The longitude of the geo point.
|=======================================================================

The following example computes distance in kilometers from Washington, DC:

`haversin(38.9072, 77.0369, doc['field_name'].lat, doc['field_name'].lon)`

In this example the coordinates could have been passed as parameters to the script,
e.g. based on geolocation of the user.
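As a sanity check, here is a standalone sketch of the great-circle computation a `haversin`-style function performs; the mean earth radius of 6,371 km and the sample coordinates are assumptions for illustration, not taken from the expressions module.

[source,java]
--------------------------------------------------
/** Illustrative only: haversine great-circle distance in kilometers. */
public final class Haversine {
    static double distanceKm(double lat1, double lon1, double lat2, double lon2) {
        final double earthRadiusKm = 6371.0; // assumed mean earth radius
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);
        double a = Math.pow(Math.sin(dLat / 2), 2)
                 + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                 * Math.pow(Math.sin(dLon / 2), 2);
        return 2 * earthRadiusKm * Math.asin(Math.sqrt(a));
    }

    public static void main(String[] args) {
        // Washington, DC to a hypothetical document location
        System.out.println(distanceKm(38.9072, 77.0369, 40.7128, 74.0060));
    }
}
--------------------------------------------------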

[float]
=== Expressions limitations

There are a few limitations relative to other script languages:

* Only numeric fields may be accessed
* Only numeric, boolean, date, and geo_point fields may be accessed
* Stored fields are not available
* If a field is sparse (only some documents contain a value), documents missing the field will have a value of `0`

[float]
=== Score

@@ -148,7 +148,7 @@ you wish to inhibit this, set `"boost_mode": "replace"`

The `weight` score allows you to multiply the score by the provided
`weight`. This can sometimes be desired since boost value set on
specific queries gets normalized, while for this score function it does
not.
not. The number value is of type float.

[source,js]
--------------------------------------------------

@@ -5,9 +5,11 @@

--
This section summarizes the changes in each release.

* <<release-notes-5.0.0-alpha2>>
* <<release-notes-5.0.0-alpha1>>
* <<release-notes-5.0.0-alpha1-2x>>

--
include::release-notes/5.0.0-alpha2.asciidoc[]
include::release-notes/5.0.0-alpha1.asciidoc[]
include::release-notes/5.0.0-alpha1-2x.asciidoc[]

@@ -0,0 +1,255 @@

[[release-notes-5.0.0-alpha2]]
== 5.0.0-alpha2 Release Notes

Also see <<breaking-changes-5.0>>.

[[breaking-5.0.0-alpha2]]
[float]
=== Breaking changes

Analysis::
* Analyze API : Rename filters/token_filters/char_filter in Analyze API in master {pull}17843[#17843] (issue: {issue}15189[#15189])

Cluster::
* Remove validation errors from cluster health response {pull}17773[#17773] (issue: {issue}16979[#16979])

Indexed Scripts/Templates::
* Store indexed scripts in the cluster state instead of the `.scripts` index {pull}17650[#17650] (issue: {issue}16651[#16651])

Packaging::
* Add JVM options configuration file {pull}17675[#17675] (issue: {issue}17121[#17121])

Percolator::
* Remove `.percolator` type in favour of `percolator` field type {pull}17560[#17560]

REST::
* Remove camelCase support {pull}17933[#17933] (issue: {issue}8988[#8988])
* Remove 'case' parameter from rest apis {pull}17774[#17774] (issue: {issue}8988[#8988])
* Disallow unquoted field names {pull}15351[#15351] (issue: {issue}9800[#9800])

Settings::
* Remove `action.get.realtime` setting {pull}17857[#17857] (issue: {issue}12543[#12543])
* Remove Settings.settingsBuilder. {pull}17619[#17619]


[[deprecation-5.0.0-alpha2]]
[float]
=== Deprecations

Query DSL::
* Deprecate Indices query {pull}17710[#17710] (issue: {issue}12017[#12017])
* Deprecate mlt, in and geo_bbox query name shortcuts {pull}17507[#17507]

Query Refactoring::
* Splits `phrase` and `phrase_prefix` in match query into `MatchPhraseQueryBuilder` and `MatchPhrasePrefixQueryBuilder` {pull}17508[#17508]


[[feature-5.0.0-alpha2]]
[float]
=== New features

Analysis::
* Add `fingerprint` token filter and `fingerprint` analyzer {pull}17873[#17873] (issue: {issue}13325[#13325])

Plugin Analysis ICU::
* Adding support for customizing the rule file in ICU tokenizer {pull}13651[#13651] (issue: {issue}13146[#13146])


[[enhancement-5.0.0-alpha2]]
[float]
=== Enhancements

CAT API::
* Add _cat/tasks {pull}17551[#17551]
* Cat health supports ts=0 option {pull}13508[#13508] (issue: {issue}10109[#10109])

Cache::
* Allow the query cache to be disabled. {pull}16268[#16268] (issue: {issue}15802[#15802])

Cluster::
* Adds tombstones to cluster state for index deletions {pull}17265[#17265] (issues: {issue}16358[#16358], {issue}17435[#17435])
* Enable acked indexing {pull}17038[#17038] (issue: {issue}7572[#7572])

Core::
* Kill thread local leak {pull}17921[#17921] (issues: {issue}283[#283], {issue}630[#630])
* Add heap size bootstrap check {pull}17728[#17728] (issue: {issue}17490[#17490])
* Remove hostname from NetworkAddress.format {pull}17601[#17601] (issue: {issue}17604[#17604])
* Bootstrapping bootstrap checks {pull}17595[#17595] (issues: {issue}17474[#17474], {issue}17570[#17570])
* Add max map count check {pull}16944[#16944]

Geo::
* Enhanced lat/long error handling {pull}16833[#16833] (issue: {issue}16137[#16137])

Index APIs::
* Fail hot_threads in a better way if unsupported by JDK {pull}15909[#15909]

Ingest::
* Streamline option naming for several processors {pull}17892[#17892] (issue: {issue}17835[#17835])

Internal::
* Makes Script type writeable {pull}17908[#17908] (issue: {issue}17753[#17753])
* FiltersAggregatorBuilder: Don't create new context for inner parsing {pull}17851[#17851]
* Clean up serialization on some stats {pull}17832[#17832] (issue: {issue}17085[#17085])
* Normalize registration for SignificanceHeuristics {pull}17830[#17830] (issue: {issue}17085[#17085])
* Make (read|write)NamedWriteable public {pull}17829[#17829] (issue: {issue}17682[#17682])
* Use try-with-resource when creating new parser instances where possible {pull}17822[#17822]
* Don't pass XContentParser to ParseFieldRegistry#lookup {pull}17794[#17794]
* Internal: Remove threadlocal from document parser {pull}17764[#17764]
* Cut range aggregations to registerAggregation {pull}17757[#17757] (issue: {issue}17085[#17085])
* Remove ParseFieldMatcher from AbstractXContentParser {pull}17756[#17756] (issue: {issue}17417[#17417])
* Remove parser argument from methods where we already pass in a parse context {pull}17738[#17738]
* Switch SearchAfterBuilder to writeGenericValue {pull}17735[#17735] (issue: {issue}17085[#17085])
* Remove StreamableReader {pull}17729[#17729] (issue: {issue}17085[#17085])
* Cleanup nested, has_child & has_parent query builders for inner hits construction {pull}17719[#17719] (issue: {issue}11118[#11118])
* Make AllocationCommands NamedWriteables {pull}17661[#17661]
* Isolate StreamableReader {pull}17656[#17656] (issue: {issue}17085[#17085])
* Create registration methods for aggregations similar to those for queries {pull}17653[#17653] (issues: {issue}17085[#17085], {issue}17389[#17389])
* Turn RestChannel into an interface {pull}17643[#17643] (issue: {issue}17133[#17133])
* Remove PROTOTYPEs from QueryBuilders {pull}17632[#17632] (issue: {issue}17085[#17085])
* Remove registerQueryParser {pull}17608[#17608]
* ParseField#getAllNamesIncludedDeprecated to not return duplicate names {pull}17504[#17504]
* Rework a query parser and improve registration {pull}17458[#17458]
* Clean up QueryParseContext and don't hold it inside QueryRewrite/ShardContext {pull}17417[#17417]

Mapping::
* Automatically upgrade analyzed strings with an analyzer to `text`. {pull}17861[#17861]
* Support dots in field names when mapping already exists {pull}17759[#17759] (issue: {issue}15951[#15951])
* Use the new points API to index numeric fields. {pull}17746[#17746] (issues: {issue}11513[#11513], {issue}16751[#16751], {issue}17007[#17007], {issue}17700[#17700])
* Simplify AllEntries, AllField and AllFieldMapper: {pull}17613[#17613]

Network::
* Limit request size {pull}17133[#17133] (issue: {issue}16011[#16011])

Packaging::
* Remove unnecessary sleep from init script restart {pull}17966[#17966]
* Explicitly set packaging permissions {pull}17912[#17912] (issue: {issue}17634[#17634])
* Allow configuring Windows service name, description and user {pull}17312[#17312]
* rpm uses non-portable `--system` flag to `useradd` {pull}14596[#14596] (issue: {issue}14211[#14211])

Percolator::
* PercolatorQueryBuilder cleanup by using MemoryIndex#fromDocument(...) helper {pull}17669[#17669] (issue: {issue}9386[#9386])

Plugins::
* Cli: Improve output for usage errors {pull}17938[#17938]
* Cli: Add verbose output with zip url when installing plugin {pull}17662[#17662] (issue: {issue}17529[#17529])

Query DSL::
* Add MatchNoDocsQuery, a query that matches no documents and prints the reason why in the toString method. {pull}17780[#17780]
* Adds `ignore_unmapped` option to geo queries {pull}17751[#17751]
* Adds `ignore_unmapped` option to nested and P/C queries {pull}17748[#17748]
* SimpleQueryParser should call MappedFieldType.termQuery when appropriate. {pull}17678[#17678]

REST::
* Allow JSON with unquoted field names by enabling system property {pull}17801[#17801] (issue: {issue}17674[#17674])

Recovery::
* TransportNodesListGatewayStartedShards should fall back to disk based index metadata if not found in cluster state {pull}17663[#17663] (issue: {issue}17630[#17630])

Reindex API::
* Properly mark reindex's child tasks as child tasks {pull}17770[#17770]

Search::
* Fail query if it contains very large rescores {pull}17917[#17917] (issue: {issue}17522[#17522])

Settings::
* Switch to registered Settings for all IndexingMemoryController settings {pull}17778[#17778] (issue: {issue}17442[#17442])

Stats::
* Add points to SegmentStats. {pull}17775[#17775] (issue: {issue}16974[#16974])
* Remove FieldStats.Float. {pull}17749[#17749]
* Show configured and remaining delay for an unassigned shard. {pull}17515[#17515] (issue: {issue}17372[#17372])

Store::
* Use `mmapfs` by default. {pull}17616[#17616] (issue: {issue}16983[#16983])

Suggesters::
* Add bwc support for reading pre-5.0 completion index {pull}17602[#17602]

Task Manager::
* Move parentTaskId into TransportRequest {pull}17872[#17872]
* Shorten the serialization of the empty TaskId {pull}17870[#17870]
* Expose whether a task is cancellable in the _tasks list API {pull}17464[#17464] (issue: {issue}17369[#17369])


[[bug-5.0.0-alpha2]]
[float]
=== Bug fixes

Aggregations::
* Adds serialisation of sigma to extended_stats_bucket pipeline aggregation {pull}17703[#17703] (issue: {issue}17701[#17701])
* Fixes NPE when no window is specified in moving average request {pull}17556[#17556] (issue: {issue}17516[#17516])
* Fixes Filter and FiltersAggregation to work with empty query {pull}17542[#17542] (issue: {issue}17518[#17518])
* ExtendedStatsAggregator should also pass sigma to empty aggs. {pull}17388[#17388] (issue: {issue}17362[#17362])

Allocation::
* Rebalancing policy shouldn't prevent hard allocation decisions {pull}17698[#17698] (issues: {issue}14057[#14057], {issue}14259[#14259])
* When considering the size of shadow replica shards, set size to 0 {pull}17509[#17509] (issue: {issue}17460[#17460])

Core::
* Refactor UUID-generating methods out of Strings {pull}17837[#17837] (issue: {issue}17819[#17819])
* Node names cleanup {pull}17723[#17723] (issue: {issue}17718[#17718])
* NullPointerException from IndexingMemoryController when a version conflict happens during recovery {pull}17569[#17569]

Ingest::
* Ingest does not close its factories {pull}17626[#17626] (issue: {issue}17625[#17625])

Internal::
* Fix BulkItemResponse.Failure.toString {pull}17871[#17871]

Logging::
* Add missing index name to search slow log. {pull}17818[#17818] (issue: {issue}17025[#17025])

Mapping::
* Fix cross type mapping updates for `boolean` fields. {pull}17882[#17882] (issue: {issue}17879[#17879])
* Fix dynamic check to properly handle parents {pull}17864[#17864] (issues: {issue}17644[#17644], {issue}17854[#17854])
* Fix array parsing to remove its context when finished parsing {pull}17768[#17768]
* Disallow fielddata loading on text fields that are not indexed. {pull}17747[#17747]
* Fail if an object is added after a field with the same name. {pull}17568[#17568] (issue: {issue}17567[#17567])

Packaging::
* Fix exit code {pull}17082[#17082]

Plugin Discovery EC2::
* Fix EC2 Discovery settings {pull}17651[#17651] (issue: {issue}16602[#16602])

Plugins::
* Quote path to java binary {pull}17496[#17496] (issue: {issue}17495[#17495])

Query DSL::
* Apply the default operator on analyzed wildcard in simple_query_string builder {pull}17776[#17776]
* Apply the default operator on analyzed wildcard in query_string builder: {pull}17711[#17711] (issue: {issue}2183[#2183])

REST::
* Fixes reading of CORS pre-flight headers and methods {pull}17523[#17523] (issue: {issue}17483[#17483])
* index is a required url part for update by query {pull}17503[#17503]

Reindex API::
* Reindex should never report negative throttled_until {pull}17799[#17799] (issue: {issue}17783[#17783])
* Reindex should gracefully handle when _source is disabled {pull}17667[#17667] (issue: {issue}17666[#17666])

Settings::
* convert settings for ResourceWatcherService to new infrastructure {pull}17948[#17948]

Snapshot/Restore::
* Fix the semantics for the BlobContainer interface {pull}17878[#17878] (issues: {issue}15579[#15579], {issue}15580[#15580])
* On restore, selecting concrete indices can select wrong index {pull}17715[#17715]

Task Manager::
* Shard level tasks in Bulk Action lose reference to their parent tasks {pull}17743[#17743]

Term Vectors::
* Fix calculation of took time of term vectors request {pull}17817[#17817] (issue: {issue}12565[#12565])


[[upgrade-5.0.0-alpha2]]
[float]
=== Upgrades

Core::
* Upgrade to lucene 6 release {pull}17657[#17657]

@@ -53,7 +53,7 @@ name, as follows:

./bin/elasticsearch -E es.cluster.name=my_cluster -E es.node.name=node_1
--------------------------------------------

NOTE: Values that contain spaces must be surrounded with qoutes. For instance `-E es.path.logs="C:\My Logs\logs"`.
NOTE: Values that contain spaces must be surrounded with quotes. For instance `-E es.path.logs="C:\My Logs\logs"`.

TIP: Typically, any cluster-wide settings (like `cluster.name`) should be
added to the `elasticsearch.yml` config file, while any node-specific settings

@@ -1,44 +0,0 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;

/**
 * FunctionValues to get the count of the number of values in a field for a document.
 */
public class CountMethodFunctionValues extends DoubleDocValues {
    SortedNumericDoubleValues values;

    CountMethodFunctionValues(ValueSource parent, AtomicNumericFieldData fieldData) {
        super(parent);

        values = fieldData.getDoubleValues();
    }

    @Override
    public double doubleVal(int doc) {
        values.setDocument(doc);
        return values.count();
    }
}

@@ -26,17 +26,18 @@ import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;

/**
 * A ValueSource to create FunctionValues to get the count of the number of values in a field for a document.
 */
public class CountMethodValueSource extends ValueSource {
    protected IndexFieldData<?> fieldData;
final class CountMethodValueSource extends ValueSource {
    IndexFieldData<?> fieldData;

    protected CountMethodValueSource(IndexFieldData<?> fieldData) {
    CountMethodValueSource(IndexFieldData<?> fieldData) {
        Objects.requireNonNull(fieldData);

        this.fieldData = fieldData;

@@ -45,10 +46,16 @@ public class CountMethodValueSource extends ValueSource {

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicFieldData leafData = fieldData.load(leaf);
        assert(leafData instanceof AtomicNumericFieldData);
        AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf);
        final SortedNumericDoubleValues values = leafData.getDoubleValues();

        return new CountMethodFunctionValues(this, (AtomicNumericFieldData)leafData);
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                values.setDocument(doc);
                return values.count();
            }
        };
    }

    @Override

@@ -63,7 +70,7 @@ public class CountMethodValueSource extends ValueSource {

    @Override
    public int hashCode() {
        return fieldData.hashCode();
        return 31 * getClass().hashCode() + fieldData.hashCode();
    }

    @Override

@@ -0,0 +1,94 @@

package org.elasticsearch.script.expression;

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.Calendar;

import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.MultiValueMode;

/**
 * Expressions API for date fields.
 */
final class DateField {
    // no instance
    private DateField() {}

    // supported variables
    static final String VALUE_VARIABLE = "value";
    static final String EMPTY_VARIABLE = "empty";

    // supported methods
    static final String MINIMUM_METHOD = "min";
    static final String MAXIMUM_METHOD = "max";
    static final String AVERAGE_METHOD = "avg";
    static final String MEDIAN_METHOD = "median";
    static final String SUM_METHOD = "sum";
    static final String COUNT_METHOD = "count";
    static final String GET_YEAR_METHOD = "getYear";
    static final String GET_MONTH_METHOD = "getMonth";
    static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth";
    static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay";
    static final String GET_MINUTES_METHOD = "getMinutes";
    static final String GET_SECONDS_METHOD = "getSeconds";

    static ValueSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
        switch (variable) {
            case VALUE_VARIABLE:
                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
            case EMPTY_VARIABLE:
                return new EmptyMemberValueSource(fieldData);
            default:
                throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "].");
        }
    }

    static ValueSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
        switch (method) {
            case MINIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
            case MAXIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
            case AVERAGE_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
            case MEDIAN_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
            case SUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
            case COUNT_METHOD:
                return new CountMethodValueSource(fieldData);
            case GET_YEAR_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR);
            case GET_MONTH_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH);
            case GET_DAY_OF_MONTH_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH);
            case GET_HOUR_OF_DAY_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY);
            case GET_MINUTES_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE);
            case GET_SECONDS_METHOD:
                return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND);
            default:
                throw new IllegalArgumentException("Member method [" + method + "] does not exist for date field [" + fieldName + "].");
        }
    }
}

@@ -1,47 +0,0 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.search.MultiValueMode;

import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;

class DateMethodFunctionValues extends FieldDataFunctionValues {
    private final int calendarType;
    private final Calendar calendar;

    DateMethodFunctionValues(ValueSource parent, MultiValueMode multiValueMode, AtomicNumericFieldData data, int calendarType) {
        super(parent, multiValueMode, data);

        this.calendarType = calendarType;
        calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
    }

    @Override
    public double doubleVal(int docId) {
        long millis = (long)dataAccessor.get(docId);
        calendar.setTimeInMillis(millis);
        return calendar.get(calendarType);
    }
}

@@ -20,20 +20,25 @@

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.TimeZone;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.search.MultiValueMode;

/** Extracts a portion of a date field with {@code Calendar.get()} */
class DateMethodValueSource extends FieldDataValueSource {

    protected final String methodName;
    protected final int calendarType;
    final String methodName;
    final int calendarType;

    DateMethodValueSource(IndexFieldData<?> indexFieldData, MultiValueMode multiValueMode, String methodName, int calendarType) {
        super(indexFieldData, multiValueMode);

@@ -47,10 +52,17 @@ class DateMethodValueSource extends FieldDataValueSource {

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicFieldData leafData = fieldData.load(leaf);
        assert(leafData instanceof AtomicNumericFieldData);

        return new DateMethodFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData, calendarType);
        AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf);
        final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
        NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d);
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int docId) {
                long millis = (long)docValues.get(docId);
                calendar.setTimeInMillis(millis);
                return calendar.get(calendarType);
            }
        };
    }

    @Override

@@ -0,0 +1,83 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;

/**
 * ValueSource to return non-zero if a field is missing.
 * <p>
 * This is essentially sugar over !count()
 */
final class EmptyMemberValueSource extends ValueSource {
    final IndexFieldData<?> fieldData;

    EmptyMemberValueSource(IndexFieldData<?> fieldData) {
        this.fieldData = Objects.requireNonNull(fieldData);
    }

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf);
        final SortedNumericDoubleValues values = leafData.getDoubleValues();
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                values.setDocument(doc);
                if (values.count() == 0) {
                    return 1;
                } else {
                    return 0;
                }
            }
        };
    }

    @Override
    public int hashCode() {
        return 31 * getClass().hashCode() + fieldData.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        EmptyMemberValueSource other = (EmptyMemberValueSource) obj;
        if (!fieldData.equals(other.fieldData)) return false;
        return true;
    }

    @Override
    public String description() {
        return "empty: field(" + fieldData.getFieldName() + ")";
    }
}
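Since `empty` is documented here as sugar over `!count()`, a tiny standalone sketch of the per-document contract follows; the array and names are illustrative assumptions, not this class's API.

[source,java]
--------------------------------------------------
/** Illustrative only: the relationship between empty and count() for one document. */
public final class EmptyVsCount {
    public static void main(String[] args) {
        double[] docValues = {};               // hypothetical document with no values for the field
        double count = docValues.length;       // what doc['f'].count() would return
        double empty = count == 0 ? 1 : 0;     // what doc['f'].empty would return (non-zero means missing)
        System.out.println(count + " " + empty); // prints: 0.0 1.0
    }
}
--------------------------------------------------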

@@ -37,20 +37,19 @@ import org.elasticsearch.index.mapper.MappedFieldType;

import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SearchLookup;

import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Map;

@@ -65,20 +64,6 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

    public static final List<String> TYPES = Collections.singletonList(NAME);

    protected static final String GET_YEAR_METHOD = "getYear";
    protected static final String GET_MONTH_METHOD = "getMonth";
    protected static final String GET_DAY_OF_MONTH_METHOD = "getDayOfMonth";
    protected static final String GET_HOUR_OF_DAY_METHOD = "getHourOfDay";
    protected static final String GET_MINUTES_METHOD = "getMinutes";
    protected static final String GET_SECONDS_METHOD = "getSeconds";

    protected static final String MINIMUM_METHOD = "min";
    protected static final String MAXIMUM_METHOD = "max";
    protected static final String AVERAGE_METHOD = "avg";
    protected static final String MEDIAN_METHOD = "median";
    protected static final String SUM_METHOD = "sum";
    protected static final String COUNT_METHOD = "count";

    @Inject
    public ExpressionScriptEngineService(Settings settings) {
        super(settings);

@@ -169,6 +154,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

            } else {
                String fieldname = null;
                String methodname = null;
                String variablename = "value"; // .value is the default for doc['field'], its optional.
                VariableContext[] parts = VariableContext.parse(variable);
                if (parts[0].text.equals("doc") == false) {
                    throw new ScriptException("Unknown variable [" + parts[0].text + "] in expression");

@@ -181,8 +167,10 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

                if (parts.length == 3) {
                    if (parts[2].type == VariableContext.Type.METHOD) {
                        methodname = parts[2].text;
                    } else if (parts[2].type != VariableContext.Type.MEMBER || !"value".equals(parts[2].text)) {
                        throw new ScriptException("Only the member variable [value] or member methods may be accessed on a field when not accessing the field directly");
                    } else if (parts[2].type == VariableContext.Type.MEMBER) {
                        variablename = parts[2].text;
                    } else {
                        throw new ScriptException("Only member variables or member methods may be accessed on a field when not accessing the field directly");
                    }
                }
                if (parts.length > 3) {

@@ -196,15 +184,38 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

                }

                IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField(fieldType);
                if (fieldData instanceof IndexNumericFieldData == false) {
                    // TODO: more context (which expression?)
                    throw new ScriptException("Field [" + fieldname + "] used in expression must be numeric");
                }
                if (methodname == null) {
                    bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN));

                // delegate valuesource creation based on field's type
                // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.

                final ValueSource valueSource;
                if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
                    // geo
                    if (methodname == null) {
                        valueSource = GeoField.getVariable(fieldData, fieldname, variablename);
                    } else {
                        valueSource = GeoField.getMethod(fieldData, fieldname, methodname);
                    }
                } else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType ||
                           fieldType instanceof DateFieldMapper.DateFieldType) {
                    // date
                    if (methodname == null) {
                        valueSource = DateField.getVariable(fieldData, fieldname, variablename);
                    } else {
                        valueSource = DateField.getMethod(fieldData, fieldname, methodname);
                    }
                } else if (fieldData instanceof IndexNumericFieldData) {
                    // number
                    if (methodname == null) {
                        valueSource = NumericField.getVariable(fieldData, fieldname, variablename);
                    } else {
                        valueSource = NumericField.getMethod(fieldData, fieldname, methodname);
                    }
                } else {
                    bindings.add(variable, getMethodValueSource(fieldType, fieldData, fieldname, methodname));
                    throw new ScriptException("Field [" + fieldname + "] used in expression must be numeric, date, or geopoint");
                }

                bindings.add(variable, valueSource);
            }
        }

@@ -215,46 +226,6 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

        }
    }

    protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName) {
        switch (methodName) {
            case GET_YEAR_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.YEAR);
            case GET_MONTH_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MONTH);
            case GET_DAY_OF_MONTH_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH);
            case GET_HOUR_OF_DAY_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY);
            case GET_MINUTES_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MINUTE);
            case GET_SECONDS_METHOD:
                return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.SECOND);
            case MINIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
            case MAXIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
            case AVERAGE_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
            case MEDIAN_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
            case SUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
            case COUNT_METHOD:
                return new CountMethodValueSource(fieldData);
            default:
                throw new IllegalArgumentException("Member method [" + methodName + "] does not exist.");
        }
    }

    protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName, int calendarType) {
        if (fieldType instanceof LegacyDateFieldMapper.DateFieldType == false
                && fieldType instanceof DateFieldMapper.DateFieldType == false) {
            throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "].");
        }

        return new DateMethodValueSource(fieldData, MultiValueMode.MIN, methodName, calendarType);
    }

    @Override
    public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> vars) {
        return new ExpressionExecutableScript(compiledScript, vars);

@@ -1,43 +0,0 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.search.MultiValueMode;

/**
 * A {@link org.apache.lucene.queries.function.FunctionValues} which wrap field data.
 */
class FieldDataFunctionValues extends DoubleDocValues {
    NumericDoubleValues dataAccessor;

    FieldDataFunctionValues(ValueSource parent, MultiValueMode m, AtomicNumericFieldData d) {
        super(parent);
        dataAccessor = m.select(d.getDoubleValues(), 0d);
    }

    @Override
    public double doubleVal(int i) {
        return dataAccessor.get(i);
    }
}

@@ -26,9 +26,10 @@ import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.search.MultiValueMode;

/**

@@ -36,15 +37,12 @@ import org.elasticsearch.search.MultiValueMode;

 */
class FieldDataValueSource extends ValueSource {

    protected IndexFieldData<?> fieldData;
    protected MultiValueMode multiValueMode;
    final IndexFieldData<?> fieldData;
    final MultiValueMode multiValueMode;

    protected FieldDataValueSource(IndexFieldData<?> d, MultiValueMode m) {
        Objects.requireNonNull(d);
        Objects.requireNonNull(m);

        fieldData = d;
        multiValueMode = m;
    protected FieldDataValueSource(IndexFieldData<?> fieldData, MultiValueMode multiValueMode) {
        this.fieldData = Objects.requireNonNull(fieldData);
        this.multiValueMode = Objects.requireNonNull(multiValueMode);
    }

    @Override

@@ -69,9 +67,14 @@ class FieldDataValueSource extends ValueSource {

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicFieldData leafData = fieldData.load(leaf);
        assert(leafData instanceof AtomicNumericFieldData);
        return new FieldDataFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData);
        AtomicNumericFieldData leafData = (AtomicNumericFieldData) fieldData.load(leaf);
        NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d);
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                return docValues.get(doc);
            }
        };
    }

    @Override

@@ -0,0 +1,81 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;

/**
 * ValueSource to return non-zero if a field is missing.
 */
final class GeoEmptyValueSource extends ValueSource {
    IndexFieldData<?> fieldData;

    GeoEmptyValueSource(IndexFieldData<?> fieldData) {
        this.fieldData = Objects.requireNonNull(fieldData);
    }

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf);
        final MultiGeoPointValues values = leafData.getGeoPointValues();
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                values.setDocument(doc);
                if (values.count() == 0) {
                    return 1;
                } else {
                    return 0;
                }
            }
        };
    }

    @Override
    public int hashCode() {
        return 31 * getClass().hashCode() + fieldData.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        GeoEmptyValueSource other = (GeoEmptyValueSource) obj;
        if (!fieldData.equals(other.fieldData)) return false;
        return true;
    }

    @Override
    public String description() {
        return "empty: field(" + fieldData.getFieldName() + ")";
    }
}

@@ -0,0 +1,53 @@

package org.elasticsearch.script.expression;

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.IndexFieldData;

/**
 * Expressions API for geo_point fields.
 */
final class GeoField {
    // no instance
    private GeoField() {}

    // supported variables
    static final String EMPTY_VARIABLE = "empty";
    static final String LAT_VARIABLE = "lat";
    static final String LON_VARIABLE = "lon";

    static ValueSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
        switch (variable) {
            case EMPTY_VARIABLE:
                return new GeoEmptyValueSource(fieldData);
            case LAT_VARIABLE:
                return new GeoLatitudeValueSource(fieldData);
            case LON_VARIABLE:
                return new GeoLongitudeValueSource(fieldData);
            default:
                throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for geo field [" + fieldName + "].");
        }
    }

    static ValueSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
        throw new IllegalArgumentException("Member method [" + method + "] does not exist for geo field [" + fieldName + "].");
    }
}

@@ -0,0 +1,81 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;

/**
 * ValueSource to return latitudes as a double "stream" for geopoint fields
 */
final class GeoLatitudeValueSource extends ValueSource {
    final IndexFieldData<?> fieldData;

    GeoLatitudeValueSource(IndexFieldData<?> fieldData) {
        this.fieldData = Objects.requireNonNull(fieldData);
    }

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf);
        final MultiGeoPointValues values = leafData.getGeoPointValues();
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                values.setDocument(doc);
                if (values.count() == 0) {
                    return 0.0;
                } else {
                    return values.valueAt(0).getLat();
                }
            }
        };
    }

    @Override
    public int hashCode() {
        return 31 * getClass().hashCode() + fieldData.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        GeoLatitudeValueSource other = (GeoLatitudeValueSource) obj;
        if (!fieldData.equals(other.fieldData)) return false;
        return true;
    }

    @Override
    public String description() {
        return "lat: field(" + fieldData.getFieldName() + ")";
    }
}
@ -0,0 +1,81 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.expression;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;

/**
 * ValueSource to return longitudes as a double "stream" for geopoint fields
 */
final class GeoLongitudeValueSource extends ValueSource {
    final IndexFieldData<?> fieldData;

    GeoLongitudeValueSource(IndexFieldData<?> fieldData) {
        this.fieldData = Objects.requireNonNull(fieldData);
    }

    @Override
    @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
        AtomicGeoPointFieldData leafData = (AtomicGeoPointFieldData) fieldData.load(leaf);
        final MultiGeoPointValues values = leafData.getGeoPointValues();
        return new DoubleDocValues(this) {
            @Override
            public double doubleVal(int doc) {
                values.setDocument(doc);
                if (values.count() == 0) {
                    return 0.0;
                } else {
                    return values.valueAt(0).getLon();
                }
            }
        };
    }

    @Override
    public int hashCode() {
        return 31 * getClass().hashCode() + fieldData.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        GeoLongitudeValueSource other = (GeoLongitudeValueSource) obj;
        if (!fieldData.equals(other.fieldData)) return false;
        return true;
    }

    @Override
    public String description() {
        return "lon: field(" + fieldData.getFieldName() + ")";
    }
}

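Both geo value sources follow the same Lucene DoubleDocValues pattern: load the per-segment fielddata once, then expose one double per document, defaulting to 0.0 when a document has no value. For reference, here is a minimal standalone sketch of that pattern with a plain array standing in for fielddata (the class name and the NaN-as-missing convention are illustrative assumptions, not part of this commit):

    import java.io.IOException;
    import java.util.Map;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.queries.function.FunctionValues;
    import org.apache.lucene.queries.function.ValueSource;
    import org.apache.lucene.queries.function.docvalues.DoubleDocValues;

    /** Minimal ValueSource over an in-memory array; NaN marks a missing value. */
    final class ArrayDoubleValueSource extends ValueSource {
        private final double[] perDocValues;

        ArrayDoubleValueSource(double[] perDocValues) {
            this.perDocValues = perDocValues;
        }

        @Override
        @SuppressWarnings("rawtypes") // ValueSource uses a rawtype
        public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
            return new DoubleDocValues(this) {
                @Override
                public double doubleVal(int doc) {
                    double v = perDocValues[doc];
                    return Double.isNaN(v) ? 0.0 : v; // same missing -> 0.0 convention as above
                }
            };
        }

        @Override
        public boolean equals(Object obj) {
            return obj instanceof ArrayDoubleValueSource
                    && ((ArrayDoubleValueSource) obj).perDocValues == perDocValues;
        }

        @Override
        public int hashCode() {
            return 31 * getClass().hashCode() + System.identityHashCode(perDocValues);
        }

        @Override
        public String description() {
            return "array: double stream";
        }
    }
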
@ -0,0 +1,75 @@
package org.elasticsearch.script.expression;

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.lucene.queries.function.ValueSource;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.MultiValueMode;

/**
 * Expressions API for numeric fields.
 */
final class NumericField {
    // no instance
    private NumericField() {}

    // supported variables
    static final String VALUE_VARIABLE = "value";
    static final String EMPTY_VARIABLE = "empty";

    // supported methods
    static final String MINIMUM_METHOD = "min";
    static final String MAXIMUM_METHOD = "max";
    static final String AVERAGE_METHOD = "avg";
    static final String MEDIAN_METHOD = "median";
    static final String SUM_METHOD = "sum";
    static final String COUNT_METHOD = "count";

    static ValueSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
        switch (variable) {
            case VALUE_VARIABLE:
                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
            case EMPTY_VARIABLE:
                return new EmptyMemberValueSource(fieldData);
            default:
                throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for " +
                        "numeric field [" + fieldName + "].");
        }
    }

    static ValueSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
        switch (method) {
            case MINIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
            case MAXIMUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
            case AVERAGE_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
            case MEDIAN_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
            case SUM_METHOD:
                return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
            case COUNT_METHOD:
                return new CountMethodValueSource(fieldData);
            default:
                throw new IllegalArgumentException("Member method [" + method + "] does not exist for numeric field [" + fieldName + "].");
        }
    }
}

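The aggregation semantics behind each MultiValueMode can be sanity-checked in isolation. Below is a hedged, self-contained sketch that reimplements the same name-to-aggregation dispatch over a plain double[] (plain Java, no Elasticsearch types; the even-count median behavior is an assumption):

    import java.util.Arrays;

    final class MultiValueDemo {
        // Mirrors NumericField's method constants: min/max/avg/median/sum/count.
        static double apply(String method, double[] values) {
            switch (method) {
                case "min":   return Arrays.stream(values).min().orElse(0.0);
                case "max":   return Arrays.stream(values).max().orElse(0.0);
                case "avg":   return Arrays.stream(values).average().orElse(0.0);
                case "sum":   return Arrays.stream(values).sum();
                case "count": return values.length;
                case "median": {
                    double[] sorted = values.clone();
                    Arrays.sort(sorted);
                    int mid = sorted.length / 2;
                    return sorted.length % 2 == 1 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2.0;
                }
                default:
                    throw new IllegalArgumentException("Member method [" + method + "] does not exist.");
            }
        }

        public static void main(String[] args) {
            double[] doc1 = {5.0, 1.0, 1.5};          // the "double0" values indexed for doc 1 below
            System.out.println(apply("avg", doc1));   // 2.5, matching the test's expectation
            System.out.println(apply("count", doc1)); // 3.0
        }
    }
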
@ -25,10 +25,10 @@ import org.apache.lucene.queries.function.FunctionValues;
 * A support class for an executable expression script that allows the double returned
 * by a {@link FunctionValues} to be modified.
 */
public class ReplaceableConstFunctionValues extends FunctionValues {
final class ReplaceableConstFunctionValues extends FunctionValues {
    private double value = 0;

    public void setValue(double value) {
    void setValue(double value) {
        this.value = value;
    }

@ -29,10 +29,10 @@ import org.apache.lucene.queries.function.ValueSource;
/**
 * A {@link ValueSource} which has a stub {@link FunctionValues} that holds a dynamically replaceable constant double.
 */
class ReplaceableConstValueSource extends ValueSource {
final class ReplaceableConstValueSource extends ValueSource {
    final ReplaceableConstFunctionValues fv;

    public ReplaceableConstValueSource() {
    ReplaceableConstValueSource() {
        fv = new ReplaceableConstFunctionValues();
    }

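For intuition, the "replaceable constant" trick is just a mutable double behind the per-document value interface, so a compiled expression can read a value the caller swaps before each evaluation. A minimal plain-Java sketch of the idea (illustrative, not the plugin's code):

    /** Standalone sketch: one mutable double served as a per-document "constant". */
    final class ReplaceableConstDemo {
        private double value = 0;

        void setValue(double value) {
            this.value = value;
        }

        double doubleVal(int doc) {
            return value; // every document sees the current constant
        }

        public static void main(String[] args) {
            ReplaceableConstDemo fv = new ReplaceableConstDemo();
            fv.setValue(42.0);
            System.out.println(fv.doubleVal(0)); // 42.0
            fv.setValue(7.0);
            System.out.println(fv.doubleVal(0)); // 7.0: same doc, new constant
        }
    }
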
@ -27,12 +27,17 @@ import java.util.Map;

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;

@ -51,8 +56,10 @@ import org.elasticsearch.search.aggregations.pipeline.SimpleValue;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript;

@ -164,10 +171,10 @@ public class MoreExpressionTests extends ESIntegTestCase {
    }

    public void testMultiValueMethods() throws Exception {
        ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double"));
        ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double"));
        ensureGreen("test");
        indexRandom(true,
            client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4"),
            client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4", "double2", "3.0"),
            client().prepareIndex("test", "doc", "2").setSource("double0", "5.0", "double1", "3.0"),
            client().prepareIndex("test", "doc", "3").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double0", "-1.5", "double1", "4.0"));

@ -227,6 +234,24 @@
        assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D);
        assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
        assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D);

        // make sure count() works for missing
        rsp = buildRequest("doc['double2'].count()").get();
        assertSearchResponse(rsp);
        hits = rsp.getHits();
        assertEquals(3, hits.getTotalHits());
        assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
        assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D);
        assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D);

        // make sure .empty works in the same way
        rsp = buildRequest("doc['double2'].empty ? 5.0 : 2.0").get();
        assertSearchResponse(rsp);
        hits = rsp.getHits();
        assertEquals(3, hits.getTotalHits());
        assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D);
        assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
        assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
    }

    public void testInvalidDateMethodCall() throws Exception {

@ -239,8 +264,8 @@
        } catch (SearchPhaseExecutionException e) {
            assertThat(e.toString() + "should have contained IllegalArgumentException",
                    e.toString().contains("IllegalArgumentException"), equalTo(true));
            assertThat(e.toString() + "should have contained can only be used with a date field type",
                    e.toString().contains("can only be used with a date field type"), equalTo(true));
            assertThat(e.toString() + "should have contained does not exist for numeric field",
                    e.toString().contains("does not exist for numeric field"), equalTo(true));
        }
    }

@ -363,8 +388,8 @@
        } catch (SearchPhaseExecutionException e) {
            assertThat(e.toString() + "should have contained ScriptException",
                    e.toString().contains("ScriptException"), equalTo(true));
            assertThat(e.toString() + "should have contained member variable [value] or member methods may be accessed",
                    e.toString().contains("member variable [value] or member methods may be accessed"), equalTo(true));
            assertThat(e.toString() + "should have contained member variable [bogus] does not exist",
                    e.toString().contains("Member variable [bogus] does not exist"), equalTo(true));
        }
    }

@ -568,4 +593,71 @@
            }
        }
    }

    public void testGeo() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location").field("type", "geo_point");
        xContentBuilder.endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
        ensureGreen();
        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
                .field("name", "test")
                .startObject("location").field("lat", 61.5240).field("lon", 105.3188).endObject()
                .endObject()).execute().actionGet();
        refresh();
        // access .lat
        SearchResponse rsp = buildRequest("doc['location'].lat").get();
        assertSearchResponse(rsp);
        assertEquals(1, rsp.getHits().getTotalHits());
        assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        // access .lon
        rsp = buildRequest("doc['location'].lon").get();
        assertSearchResponse(rsp);
        assertEquals(1, rsp.getHits().getTotalHits());
        assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        // access .empty
        rsp = buildRequest("doc['location'].empty ? 1 : 0").get();
        assertSearchResponse(rsp);
        assertEquals(1, rsp.getHits().getTotalHits());
        assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        // call haversin
        rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get();
        assertSearchResponse(rsp);
        assertEquals(1, rsp.getHits().getTotalHits());
        assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D);
    }

    public void testBoolean() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("vip").field("type", "boolean");
        xContentBuilder.endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
        ensureGreen();
        indexRandom(true,
                client().prepareIndex("test", "doc", "1").setSource("price", 1.0, "vip", true),
                client().prepareIndex("test", "doc", "2").setSource("price", 2.0, "vip", false),
                client().prepareIndex("test", "doc", "3").setSource("price", 2.0, "vip", false));
        // access .value
        SearchResponse rsp = buildRequest("doc['vip'].value").get();
        assertSearchResponse(rsp);
        assertEquals(3, rsp.getHits().getTotalHits());
        assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
        assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
        // access .empty
        rsp = buildRequest("doc['vip'].empty ? 1 : 0").get();
        assertSearchResponse(rsp);
        assertEquals(3, rsp.getHits().getTotalHits());
        assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
        assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
        // ternary operator
        // vips have a 50% discount
        rsp = buildRequest("doc['vip'] ? doc['price']/2 : doc['price']").get();
        assertSearchResponse(rsp);
        assertEquals(3, rsp.getHits().getTotalHits());
        assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
        assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
        assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
    }
}

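The 3170 ± 50 expectation in the haversin assertion above can be reproduced offline. A self-contained sketch of the great-circle computation (the 6371 km mean earth radius is an assumption here; the plugin's own constant may differ slightly):

    /** Standalone haversine check for the testGeo expectation. */
    final class HaversinCheck {
        static double haversin(double lat1, double lon1, double lat2, double lon2) {
            double r = 6371.0; // assumed mean earth radius in km
            double dLat = Math.toRadians(lat2 - lat1);
            double dLon = Math.toRadians(lon2 - lon1);
            double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                    + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                    * Math.sin(dLon / 2) * Math.sin(dLon / 2);
            return 2 * r * Math.asin(Math.sqrt(a));
        }

        public static void main(String[] args) {
            // (38.9072, 77.0369) vs. the indexed point (61.5240, 105.3188)
            System.out.println(haversin(38.9072, 77.0369, 61.5240, 105.3188)); // ~3169.6, inside 3170 +/- 50
        }
    }
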
@ -74,6 +74,7 @@ import org.junit.Before;
import java.io.IOException;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.containsString;

@ -84,7 +85,7 @@
public class TemplateQueryParserTests extends ESTestCase {

    private Injector injector;
    private QueryShardContext context;
    private Supplier<QueryShardContext> contextFactory;

    @Before
    public void setup() throws IOException {

@ -134,7 +135,8 @@
        ScriptService scriptService = injector.getInstance(ScriptService.class);
        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
        MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry();
        MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> context);
        MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () ->
            contextFactory.get());
        IndicesFieldDataCache cache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {});
        IndexFieldDataService indexFieldDataService = new IndexFieldDataService(idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService);
        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {

@ -149,7 +151,7 @@
            }
        });
        IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
        context = new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService,
        contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService,
                similarityService, scriptService, indicesQueriesRegistry, proxy, null, null, null);
    }

@ -164,7 +166,7 @@
        String templateString = "{" + "\"query\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}";

        XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString);
        context.reset();
        QueryShardContext context = contextFactory.get();
        templateSourceParser.nextToken();

        Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)),

@ -176,7 +178,7 @@
        String templateString = "{" + " \"inline\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\"," + " \"params\":{"
                + " \"template\":\"all\"," + " \"use_it\": true" + " }" + "}";
        XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString);
        context.reset();
        QueryShardContext context = contextFactory.get();

        Query query = QueryBuilder.rewriteQuery(TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)), context).toQuery(context);
        assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);

@ -192,7 +194,7 @@
                + " \"params\":{" + " \"size\":2" + " }\n" + "}";

        XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString);
        context.reset();
        QueryShardContext context = contextFactory.get();

        try {
            TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).rewrite(context);

@ -206,7 +208,7 @@
        String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } ";

        XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString);
        context.reset();
        QueryShardContext context = contextFactory.get();
        templateSourceParser.nextToken();

@ -219,7 +221,7 @@
        String templateString = "{ \"file\": \"storedTemplate\" ,\"params\":{\"template\":\"all\" } } ";

        XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString);
        context.reset();
        QueryShardContext context = contextFactory.get();
        templateSourceParser.nextToken();
        try {
            TemplateQueryBuilder.fromXContent(context.newParseContext(templateSourceParser)).toQuery(context);

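The recurring edit in this file swaps a shared, resettable `context` for a `Supplier<QueryShardContext>` so each test builds a fresh context instead of calling `reset()`. A minimal generic illustration of that design choice (the `Context` type here is a stand-in, not an Elasticsearch class):

    import java.util.function.Supplier;

    final class FreshContextDemo {
        /** Stand-in for QueryShardContext: all state is per-instance. */
        static final class Context {
            int uses = 0;
        }

        public static void main(String[] args) {
            Supplier<Context> contextFactory = Context::new;
            Context first = contextFactory.get();
            first.uses++;
            Context second = contextFactory.get();
            // Each call yields an independent instance, so no reset() is needed.
            System.out.println(first != second); // true
            System.out.println(second.uses);     // 0: no state leaks between consumers
        }
    }
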
@ -23,6 +23,7 @@ import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.annotations.TestGroup;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;

@ -2040,15 +2041,20 @@
    }

    protected HttpRequestBuilder httpClient() {
        return httpClient(HttpClients.createDefault());
    }

    protected HttpRequestBuilder httpClient(CloseableHttpClient httpClient) {
        final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get();
        final NodeInfo[] nodes = nodeInfos.getNodes();
        assertTrue(nodes.length > 0);
        TransportAddress publishAddress = randomFrom(nodes).getHttp().address().publishAddress();
        assertEquals(1, publishAddress.uniqueAddressTypeId());
        InetSocketAddress address = ((InetSocketTransportAddress) publishAddress).address();
        return new HttpRequestBuilder(HttpClients.createDefault()).host(NetworkAddress.format(address.getAddress())).port(address.getPort());
        return new HttpRequestBuilder(httpClient).host(NetworkAddress.format(address.getAddress())).port(address.getPort());
    }

    /**
     * This method is executed iff the test is annotated with {@link SuiteScopeTestCase}
     * before the first test of this class is executed.

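The new overload lets a test inject a preconfigured Apache HttpClient rather than always receiving `HttpClients.createDefault()`. A hypothetical caller in an ESIntegTestCase subclass (the helper name and the 30-second timeout are illustrative assumptions, not part of this commit; RequestConfig is from Apache HttpClient 4.x):

    // Hypothetical helper: exercise the new overload with a client that
    // uses a longer socket timeout than the default.
    protected HttpRequestBuilder slowHttpClient() {
        CloseableHttpClient client = HttpClients.custom()
                .setDefaultRequestConfig(RequestConfig.custom().setSocketTimeout(30_000).build())
                .build();
        return httpClient(client); // the new overload accepts the injected client
    }
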