Merge branch 'master' into feature/query-refactoring

Conflicts:
	core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java
commit 5d3bedfe96
Author:    javanna
Committer: Luca Cavanna
Date:      2015-06-16 11:12:00 +02:00
190 changed files with 2820 additions and 9357 deletions

View File

@@ -100,8 +100,7 @@ You will find the newly built packages under: `./target/releases/`.
 Before submitting your changes, run the test suite to make sure that nothing is broken, with:
 ```sh
-ES_TEST_LOCAL=true mvn clean test -Dtests.slow=true
+mvn clean test
 ```
 Source: [Contributing to elasticsearch](http://www.elasticsearch.org/contributing-to-elasticsearch/)

View File

@@ -205,7 +205,7 @@ Elasticsearch uses "Maven":http://maven.apache.org for its build system.
 In order to create a distribution, simply run the @mvn clean package
 -DskipTests@ command in the cloned directory.
-The distribution will be created under @target/releases@.
+The distribution for each project will be created under the @target/releases@ directory in that project.
 See the "TESTING":TESTING.asciidoc file for more information about
 running the Elasticsearch test suite.

View File

@@ -66,69 +66,56 @@
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-core</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-backward-codecs</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-analyzers-common</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-queries</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-memory</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-highlighter</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-queryparser</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-suggest</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-join</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-spatial</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-expressions</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>com.spatial4j</groupId>
             <artifactId>spatial4j</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>com.vividsolutions</groupId>
             <artifactId>jts</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <!-- needed for templating -->
@@ -140,45 +127,34 @@
         <!-- Lucene spatial -->
-        <!-- START: dependencies that are shaded -->
+        <!-- START: dependencies that might be shaded -->
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.carrotsearch</groupId>
             <artifactId>hppc</artifactId>
         </dependency>
         <dependency>
             <groupId>joda-time</groupId>
             <artifactId>joda-time</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.joda</groupId>
             <artifactId>joda-convert</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-core</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.dataformat</groupId>
             <artifactId>jackson-dataformat-smile</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.dataformat</groupId>
             <artifactId>jackson-dataformat-yaml</artifactId>
-            <scope>compile</scope>
             <exclusions>
                 <exclusion>
                     <groupId>com.fasterxml.jackson.core</groupId>
@@ -186,83 +162,61 @@
                 </exclusion>
             </exclusions>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.dataformat</groupId>
             <artifactId>jackson-dataformat-cbor</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.ning</groupId>
             <artifactId>compress-lzf</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>com.tdunning</groupId>
             <artifactId>t-digest</artifactId>
-            <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
         </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
         </dependency>
-        <!-- END: dependencies that are shaded -->
+        <!-- END: dependencies that might be shaded -->
         <dependency>
             <groupId>org.codehaus.groovy</groupId>
             <artifactId>groovy-all</artifactId>
             <classifier>indy</classifier>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>log4j</groupId>
             <artifactId>apache-log4j-extras</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>net.java.dev.jna</groupId>
             <artifactId>jna</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>
         <dependency>
             <groupId>org.fusesource</groupId>
             <artifactId>sigar</artifactId>
-            <scope>compile</scope>
             <optional>true</optional>
         </dependency>

View File

@@ -242,8 +242,10 @@ public class Version {
     public static final Version V_1_6_0 = new Version(V_1_6_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_1_6_1_ID = 1060199;
     public static final Version V_1_6_1 = new Version(V_1_6_1_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+    public static final int V_1_7_0_ID = 1070099;
+    public static final Version V_1_7_0 = new Version(V_1_7_0_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
     public static final int V_2_0_0_ID = 2000099;
-    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_0);
+    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
     public static final Version CURRENT = V_2_0_0;
@@ -259,6 +261,8 @@ public class Version {
         switch (id) {
             case V_2_0_0_ID:
                 return V_2_0_0;
+            case V_1_7_0_ID:
+                return V_1_7_0;
             case V_1_6_1_ID:
                 return V_1_6_1;
             case V_1_6_0_ID:
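A side note on the numeric IDs above: they appear to follow the usual Elasticsearch convention of two digits each for major, minor and revision plus a trailing build byte, with 99 marking a GA release. A minimal sketch, assuming that convention (the helper below is illustrative, not part of the class):

```java
// Hypothetical helper reproducing the apparent ID scheme: MMmmrrbb.
final class VersionIds {
    static int versionId(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(versionId(1, 7, 0, 99)); // 1070099, matching V_1_7_0_ID
        System.out.println(versionId(2, 0, 0, 99)); // 2000099, matching V_2_0_0_ID
    }
}
```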

View File

@@ -165,12 +165,6 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO
         }
         return defaultValue;
     }
-
-    @Override
-    @Deprecated
-    public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
-        return paramAsBoolean(key, defaultValue);
-    }
 };

 private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) {

View File

@@ -430,60 +430,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
         return this;
     }
-
-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param script
-     *            The script to use
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder addScriptField(String name, String script) {
-        sourceBuilder().scriptField(name, script);
-        return this;
-    }
-
-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param script
-     *            The script to use
-     * @param params
-     *            Parameters that the script can use.
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder addScriptField(String name, String script, Map<String, Object> params) {
-        sourceBuilder().scriptField(name, script, params);
-        return this;
-    }
-
-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param lang
-     *            The language of the script
-     * @param script
-     *            The script to use
-     * @param params
-     *            Parameters that the script can use (can be <tt>null</tt>).
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchRequestBuilder addScriptField(String name, String lang, String script, Map<String, Object> params) {
-        sourceBuilder().scriptField(name, lang, script, params);
-        return this;
-    }
-
     /**
      * Adds a sort against the given field name and the sort ordering.
      *
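For callers hit by the removals above, migration means wrapping the script in a `Script` object and using the surviving `addScriptField(String, Script)` overload. A hedged sketch; the one-argument `Script` constructor (inline script, default language) is assumed from the 2.0-era API, and the index and field names are made up:

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;

class ScriptFieldMigration {
    // was: client.prepareSearch("orders").addScriptField("price_x2", "doc['price'].value * 2")
    static SearchRequestBuilder withDoubledPrice(Client client) {
        return client.prepareSearch("orders")
                .addScriptField("price_x2", new Script("doc['price'].value * 2"));
    }
}
```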

View File

@@ -39,7 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.shard.IndexShard;
-import org.elasticsearch.index.suggest.stats.ShardSuggestService;
+import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.SuggestPhase;
@@ -130,8 +130,8 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
     protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
         IndexShard indexShard = indexService.shardSafe(request.shardId().id());
-        ShardSuggestService shardSuggestService = indexShard.shardSuggestService();
-        shardSuggestService.preSuggest();
+        ShardSuggestMetric suggestMetric = indexShard.getSuggestMetric();
+        suggestMetric.preSuggest();
         long startTime = System.nanoTime();
         XContentParser parser = null;
         try (Engine.Searcher searcher = indexShard.acquireSearcher("suggest")) {
@@ -153,7 +153,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
             if (parser != null) {
                 parser.close();
             }
-            shardSuggestService.postSuggest(System.nanoTime() - startTime);
+            suggestMetric.postSuggest(System.nanoTime() - startTime);
         }
     }
 }

View File

@@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService;

 import java.util.Map;
@@ -87,64 +86,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
         return this;
     }
-
-    /**
-     * The script to execute. Note, make sure not to send different script each
-     * times and instead use script params if possible with the same
-     * (automatically compiled) script.
-     * <p/>
-     * The script works with the variable <code>ctx</code>, which is bound to
-     * the entry, e.g. <code>ctx._source.mycounter += 1</code>.
-     *
-     * @see #setScriptLang(String)
-     * @see #setScriptParams(Map)
-     *
-     * @deprecated use {@link #setScript(Script)} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder setScript(String script, ScriptService.ScriptType scriptType) {
-        request.script(script, scriptType);
-        return this;
-    }
-
-    /**
-     * The language of the script to execute. Valid options are: mvel, js,
-     * groovy, python, and native (Java)<br>
-     * Default: groovy
-     * <p/>
-     * Ref:
-     * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current
-     * /modules-scripting.html
-     *
-     * @deprecated use {@link #setScript(Script)} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder setScriptLang(String scriptLang) {
-        request.scriptLang(scriptLang);
-        return this;
-    }
-
-    /**
-     * Sets the script parameters to use with the script.
-     *
-     * @deprecated use {@link #setScript(Script)} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder setScriptParams(Map<String, Object> scriptParams) {
-        request.scriptParams(scriptParams);
-        return this;
-    }
-
-    /**
-     * Add a script parameter.
-     *
-     * @deprecated use {@link #setScript(Script)} instead
-     */
-    @Deprecated
-    public UpdateRequestBuilder addScriptParam(String name, Object value) {
-        request.addScriptParam(name, value);
-        return this;
-    }
-
     /**
      * Explicitly specify the fields that will be returned. By default, nothing is returned.
     */
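As above, the removed setter chain folds into a single `Script` argument. A sketch assuming the 2.0-era four-argument constructor (script, type, language, params); index, type and id below are placeholders:

```java
import java.util.Map;

import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;

class UpdateScriptMigration {
    // was: setScript("ctx._source.mycounter += count", ScriptType.INLINE)
    //          .setScriptLang("groovy").setScriptParams(params)
    static UpdateRequestBuilder incrementCounter(Client client, Map<String, Object> params) {
        Script script = new Script("ctx._source.mycounter += count", ScriptType.INLINE, "groovy", params);
        return client.prepareUpdate("idx", "type", "1").setScript(script);
    }
}
```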

View File

@@ -390,12 +390,14 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
         try {
             nodeLatch.await(fetchTimeout.getMillis(), TimeUnit.MILLISECONDS);
         } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // restore interrupt status
             logger.warn("Failed to update node information for ClusterInfoUpdateJob within 15s timeout");
         }

         try {
             indicesLatch.await(fetchTimeout.getMillis(), TimeUnit.MILLISECONDS);
         } catch (InterruptedException e) {
+            Thread.currentThread().interrupt(); // restore interrupt status
             logger.warn("Failed to update shard information for ClusterInfoUpdateJob within 15s timeout");
         }
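The two added lines apply a standard Java concurrency idiom: a catch block that swallows `InterruptedException` should re-set the thread's interrupt flag so code further up the stack can still observe the interruption. A self-contained illustration of the same pattern:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

final class AwaitUtil {
    // Wait for the latch; on interruption, restore the flag instead of
    // silently discarding it.
    static void awaitQuietly(CountDownLatch latch, long timeoutMillis) {
        try {
            latch.await(timeoutMillis, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
        }
    }
}
```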

View File

@@ -60,44 +60,33 @@ public class ImmutableShardRouting implements Streamable, Serializable, ShardRou
     }

     public ImmutableShardRouting(ShardRouting copy) {
-        this(copy.index(), copy.id(), copy.currentNodeId(), copy.primary(), copy.state(), copy.version());
-        this.relocatingNodeId = copy.relocatingNodeId();
-        this.restoreSource = copy.restoreSource();
-        if (copy instanceof ImmutableShardRouting) {
-            this.shardIdentifier = ((ImmutableShardRouting) copy).shardIdentifier;
-        }
+        this(copy, copy.version());
     }

     public ImmutableShardRouting(ShardRouting copy, long version) {
-        this(copy.index(), copy.id(), copy.currentNodeId(), copy.primary(), copy.state(), copy.version());
-        this.relocatingNodeId = copy.relocatingNodeId();
-        this.restoreSource = copy.restoreSource();
-        this.version = version;
+        this(copy.index(), copy.id(), copy.currentNodeId(), copy.relocatingNodeId(), copy.restoreSource(), copy.primary(), copy.state(), version);
         if (copy instanceof ImmutableShardRouting) {
             this.shardIdentifier = ((ImmutableShardRouting) copy).shardIdentifier;
         }
     }

+    public ImmutableShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) {
+        this(index, shardId, currentNodeId, null, primary, state, version);
+    }
+
     public ImmutableShardRouting(String index, int shardId, String currentNodeId,
                                  String relocatingNodeId, boolean primary, ShardRoutingState state, long version) {
-        this(index, shardId, currentNodeId, primary, state, version);
-        this.relocatingNodeId = relocatingNodeId;
+        this(index, shardId, currentNodeId, relocatingNodeId, null, primary, state, version);
     }

     public ImmutableShardRouting(String index, int shardId, String currentNodeId,
                                  String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version) {
-        this(index, shardId, currentNodeId, relocatingNodeId, primary, state, version);
-        this.restoreSource = restoreSource;
-    }
-
-    public ImmutableShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) {
         this.index = index;
         this.shardId = shardId;
         this.currentNodeId = currentNodeId;
+        this.relocatingNodeId = relocatingNodeId;
         this.primary = primary;
         this.state = state;
         this.asList = ImmutableList.of((ShardRouting) this);
         this.version = version;
+        this.restoreSource = restoreSource;
     }

     @Override

View File

@@ -32,25 +32,25 @@ public class MutableShardRouting extends ImmutableShardRouting {
     }

     public MutableShardRouting(ShardRouting copy, long version) {
-        super(copy);
-        this.version = version;
-    }
-
-    public MutableShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) {
-        super(index, shardId, currentNodeId, primary, state, version);
-    }
-
-    public MutableShardRouting(String index, int shardId, String currentNodeId,
-                               String relocatingNodeId, boolean primary, ShardRoutingState state, long version) {
-        super(index, shardId, currentNodeId, relocatingNodeId, null, primary, state, version);
+        super(copy, version);
     }

     public MutableShardRouting(String index, int shardId, String currentNodeId,
                                String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version) {
         super(index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version);
+        assert state != ShardRoutingState.UNASSIGNED : "new mutable routing should not be created with UNASSIGNED state, should moveToUnassigned";
     }

+    /**
+     * Moves the shard to unassigned state.
+     */
+    void moveToUnassigned() {
+        version++;
+        assert state != ShardRoutingState.UNASSIGNED;
+        state = ShardRoutingState.UNASSIGNED;
+        currentNodeId = null;
+        relocatingNodeId = null;
+    }
+
     /**
      * Assign this shard to a node.
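The new `moveToUnassigned()` centralizes a transition that callers previously spelled out by hand; the next three files switch to it. A reduced illustration of the pattern with hypothetical names (not the actual Elasticsearch types):

```java
// The owning class performs the whole transition in one place: version bump,
// state change, and node fields cleared together.
final class ShardEntry {
    String currentNodeId;
    String relocatingNodeId;
    String state = "STARTED";
    long version;

    void moveToUnassigned() {
        assert !"UNASSIGNED".equals(state);
        version++;            // every routing change bumps the version
        state = "UNASSIGNED";
        currentNodeId = null; // an unassigned shard lives on no node
        relocatingNodeId = null;
    }
}
```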

View File

@@ -109,7 +109,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
                 // add the counterpart shard with relocatingNodeId reflecting the source from which
                 // it's relocating from.
                 sr = new MutableShardRouting(shard.index(), shard.id(), shard.relocatingNodeId(),
-                        shard.currentNodeId(), shard.primary(), ShardRoutingState.INITIALIZING, shard.version());
+                        shard.currentNodeId(), shard.restoreSource(), shard.primary(), ShardRoutingState.INITIALIZING, shard.version());
                 entries.add(sr);
                 assignedShardsAdd(sr);
             } else if (!shard.active()) { // shards that are initializing without being relocated
@@ -796,9 +796,10 @@ public class RoutingNodes implements Iterable<RoutingNode> {
         }

         public void moveToUnassigned() {
-            iterator().remove();
-            unassigned().add(new MutableShardRouting(shard.index(), shard.id(),
-                    null, shard.primary(), ShardRoutingState.UNASSIGNED, shard.version() + 1));
+            remove();
+            MutableShardRouting unassigned = new MutableShardRouting(shard); // protective copy of the mutable shard
+            unassigned.moveToUnassigned();
+            unassigned().add(unassigned);
         }
     }
 }

View File

@@ -470,14 +470,11 @@ public class AllocationService extends AbstractComponent {
             MutableShardRouting shardRouting = relocatingFromNode.next();
             if (shardRouting.equals(failedShard)) {
                 dirty = true;
-                relocatingFromNode.remove();
                 if (addToIgnoreList) {
                     // make sure we ignore this shard on the relevant node
                     allocation.addIgnoreShardForNode(failedShard.shardId(), failedShard.currentNodeId());
                 }
-                routingNodes.unassigned().add(new MutableShardRouting(failedShard.index(), failedShard.id(),
-                        null, failedShard.primary(), ShardRoutingState.UNASSIGNED, failedShard.version() + 1));
+                relocatingFromNode.moveToUnassigned();
                 break;
             }
         }
@@ -512,7 +509,6 @@ public class AllocationService extends AbstractComponent {
             // make sure we ignore this shard on the relevant node
             allocation.addIgnoreShardForNode(failedShard.shardId(), failedShard.currentNodeId());
         }
-        node.remove();
         // move all the shards matching the failed shard to the end of the unassigned list
         // so we give a chance for other allocations and won't create poison failed allocations
         // that can keep other shards from being allocated (because of limits applied on how many
@@ -529,9 +525,7 @@ public class AllocationService extends AbstractComponent {
             routingNodes.unassigned().addAll(shardsToMove);
         }

-        routingNodes.unassigned().add(new MutableShardRouting(failedShard.index(), failedShard.id(), null,
-                null, failedShard.restoreSource(), failedShard.primary(), ShardRoutingState.UNASSIGNED, failedShard.version() + 1));
+        node.moveToUnassigned();
         break;
     }
 }

View File

@@ -222,9 +222,7 @@ public class CancelAllocationCommand implements AllocationCommand {
                     throw new IllegalArgumentException("[cancel_allocation] can't cancel " + shardId + " on node " +
                             discoNode + ", shard is primary and started");
                 }
-                it.remove();
-                allocation.routingNodes().unassigned().add(new MutableShardRouting(shardRouting.index(), shardRouting.id(),
-                        null, shardRouting.primary(), ShardRoutingState.UNASSIGNED, shardRouting.version() + 1));
+                it.moveToUnassigned();
             }
         }
         if (!found) {

View File

@@ -58,13 +58,13 @@ public final class AllTermQuery extends PayloadTermQuery {
     public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
         // TODO: needsScores
         // we should be able to just return a regular SpanTermWeight, at most here if needsScores == false?
-        return new AllTermWeight(this, searcher);
+        return new AllTermWeight(this, searcher, needsScores);
     }

     class AllTermWeight extends PayloadTermWeight {

-        AllTermWeight(AllTermQuery query, IndexSearcher searcher) throws IOException {
-            super(query, searcher);
+        AllTermWeight(AllTermQuery query, IndexSearcher searcher, boolean needsScores) throws IOException {
+            super(query, searcher, needsScores);
         }

         @Override

View File

@@ -37,13 +37,6 @@ public interface ToXContent {
         boolean paramAsBoolean(String key, boolean defaultValue);

         Boolean paramAsBoolean(String key, Boolean defaultValue);
-
-        /**
-         * @deprecated since 1.0.0
-         * use {@link ToXContent.Params#paramAsBoolean(String, Boolean)} instead
-         */
-        @Deprecated
-        Boolean paramAsBooleanOptional(String key, Boolean defaultValue);
     }

     public static final Params EMPTY_PARAMS = new Params() {
@@ -67,10 +60,6 @@ public interface ToXContent {
             return defaultValue;
         }
-
-        @Override @Deprecated
-        public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
-            return paramAsBoolean(key, defaultValue);
-        }
     };

     public static class MapParams implements Params {
@@ -104,11 +93,6 @@ public interface ToXContent {
         public Boolean paramAsBoolean(String key, Boolean defaultValue) {
             return Booleans.parseBoolean(param(key), defaultValue);
         }
-
-        @Override @Deprecated
-        public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
-            return paramAsBoolean(key, defaultValue);
-        }
     }

     public static class DelegatingMapParams extends MapParams {
@@ -139,11 +123,6 @@ public interface ToXContent {
         public Boolean paramAsBoolean(String key, Boolean defaultValue) {
             return super.paramAsBoolean(key, delegate.paramAsBoolean(key, defaultValue));
         }
-
-        @Override @Deprecated
-        public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
-            return super.paramAsBooleanOptional(key, delegate.paramAsBooleanOptional(key, defaultValue));
-        }
     }

     XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException;
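Migration for callers of the removed method is a rename, since `paramAsBooleanOptional` already delegated to `paramAsBoolean`:

```java
import org.elasticsearch.common.xcontent.ToXContent;

final class ParamsMigration {
    static Boolean pretty(ToXContent.Params params) {
        // was: params.paramAsBooleanOptional("pretty", null)
        return params.paramAsBoolean("pretty", null);
    }
}
```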

View File

@@ -177,7 +177,7 @@ public class XContentFactory {
     public static XContent xContent(CharSequence content) {
         XContentType type = xContentType(content);
         if (type == null) {
-            throw new ElasticsearchParseException("Failed to derive xcontent from " + content);
+            throw new ElasticsearchParseException("Failed to derive xcontent");
         }
         return xContent(type);
     }
@@ -195,7 +195,7 @@ public class XContentFactory {
     public static XContent xContent(byte[] data, int offset, int length) {
         XContentType type = xContentType(data, offset, length);
         if (type == null) {
-            throw new ElasticsearchParseException("Failed to derive xcontent from (offset=" + offset + ", length=" + length + "): " + Arrays.toString(data));
+            throw new ElasticsearchParseException("Failed to derive xcontent");
         }
         return xContent(type);
     }
@@ -290,7 +290,7 @@ public class XContentFactory {
     public static XContent xContent(BytesReference bytes) {
         XContentType type = xContentType(bytes);
         if (type == null) {
-            throw new ElasticsearchParseException("Failed to derive xcontent from " + bytes);
+            throw new ElasticsearchParseException("Failed to derive xcontent");
        }
         return xContent(type);
     }
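The three methods keep their sniffing behavior and only drop the payload from the error message, presumably so raw request bytes no longer leak into exceptions and logs. A small usage sketch, restricted to calls visible in this diff:

```java
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;

final class SniffExample {
    static XContent detect(CharSequence payload) {
        // Throws ElasticsearchParseException("Failed to derive xcontent")
        // when the payload matches no known format.
        return XContentFactory.xContent(payload);
    }
}
```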

View File

@@ -31,7 +31,6 @@ import org.elasticsearch.common.util.concurrent.KeyedLock;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.plain.*;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
@@ -60,8 +59,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
     private static final String DOC_VALUES_FORMAT = "doc_values";
     private static final String ARRAY_FORMAT = "array";
     private static final String PAGED_BYTES_FORMAT = "paged_bytes";
-    private static final String FST_FORMAT = "fst";
-    private static final String COMPRESSED_FORMAT = "compressed";

     private final static ImmutableMap<String, IndexFieldData.Builder> buildersByType;
     private final static ImmutableMap<String, IndexFieldData.Builder> docValuesBuildersByType;
@@ -99,7 +96,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
         buildersByTypeAndFormat = MapBuilder.<Tuple<String, String>, IndexFieldData.Builder>newMapBuilder()
                 .put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
-                .put(Tuple.tuple("string", FST_FORMAT), new FSTBytesIndexFieldData.Builder())
                 .put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
                 .put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
@@ -130,7 +126,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
                 .put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointDoubleArrayIndexFieldData.Builder())
                 .put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new GeoPointBinaryDVIndexFieldData.Builder())
                 .put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
-                .put(Tuple.tuple("geo_point", COMPRESSED_FORMAT), new GeoPointCompressedIndexFieldData.Builder())

                 .put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())
                 .put(Tuple.tuple("binary", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
View File

@@ -24,7 +24,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;

View File

@@ -1,121 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata.plain;
-
-import org.apache.lucene.index.RandomAccessOrds;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.IntsRef;
-import org.apache.lucene.util.IntsRefBuilder;
-import org.apache.lucene.util.fst.FST;
-import org.apache.lucene.util.fst.FST.Arc;
-import org.apache.lucene.util.fst.FST.BytesReader;
-import org.apache.lucene.util.fst.Util;
-import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-/**
- */
-public class FSTBytesAtomicFieldData extends AbstractAtomicOrdinalsFieldData {
-
-    // 0 ordinal in values means no value (its null)
-    protected final Ordinals ordinals;
-
-    private long size = -1;
-
-    private final FST<Long> fst;
-
-    public FSTBytesAtomicFieldData(FST<Long> fst, Ordinals ordinals) {
-        this.ordinals = ordinals;
-        this.fst = fst;
-    }
-
-    @Override
-    public void close() {
-    }
-
-    @Override
-    public long ramBytesUsed() {
-        if (size == -1) {
-            long size = ordinals.ramBytesUsed();
-            // FST
-            size += fst == null ? 0 : fst.ramBytesUsed();
-            this.size = size;
-        }
-        return size;
-    }
-
-    @Override
-    public Collection<Accountable> getChildResources() {
-        List<Accountable> resources = new ArrayList<>();
-        resources.add(Accountables.namedAccountable("ordinals", ordinals));
-        if (fst != null) {
-            resources.add(Accountables.namedAccountable("terms", fst));
-        }
-        return Collections.unmodifiableList(resources);
-    }
-
-    @Override
-    public RandomAccessOrds getOrdinalsValues() {
-        return ordinals.ordinals(new ValuesHolder(fst));
-    }
-
-    private static class ValuesHolder implements Ordinals.ValuesHolder {
-
-        private final FST<Long> fst;
-
-        // per-thread resources
-        private final BytesRefBuilder scratch;
-        protected final BytesReader in;
-        protected final Arc<Long> firstArc = new Arc<>();
-        protected final Arc<Long> scratchArc = new Arc<>();
-        protected final IntsRefBuilder scratchInts = new IntsRefBuilder();
-
-        ValuesHolder(FST<Long> fst) {
-            this.fst = fst;
-            scratch = new BytesRefBuilder();
-            in = fst.getBytesReader();
-        }
-
-        @Override
-        public BytesRef lookupOrd(long ord) {
-            assert ord != SortedSetDocValues.NO_MORE_ORDS;
-            in.setPosition(0);
-            fst.getFirstArc(firstArc);
-            try {
-                IntsRef output = Util.getByOutput(fst, ord, in, firstArc, scratchArc, scratchInts);
-                scratch.clear();
-                scratch.grow(output.length);
-                Util.toBytesRef(output, scratch);
-            } catch (IOException ex) {
-                //bogus
-            }
-            return scratch.get();
-        }
-    }
-}

View File

@@ -1,116 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata.plain;
-
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.IntsRefBuilder;
-import org.apache.lucene.util.fst.FST;
-import org.apache.lucene.util.fst.FST.INPUT_TYPE;
-import org.apache.lucene.util.fst.PositiveIntOutputs;
-import org.apache.lucene.util.fst.Util;
-import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.fielddata.*;
-import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
-import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.settings.IndexSettings;
-import org.elasticsearch.indices.breaker.CircuitBreakerService;
-
-/**
- */
-public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
-
-    private final CircuitBreakerService breakerService;
-
-    public static class Builder implements IndexFieldData.Builder {
-
-        @Override
-        public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
-                                            IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
-            return new FSTBytesIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
-        }
-    }
-
-    FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType,
-                           IndexFieldDataCache cache, CircuitBreakerService breakerService) {
-        super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService);
-        this.breakerService = breakerService;
-    }
-
-    @Override
-    public AtomicOrdinalsFieldData loadDirect(LeafReaderContext context) throws Exception {
-        LeafReader reader = context.reader();
-
-        Terms terms = reader.terms(getFieldNames().indexName());
-        AtomicOrdinalsFieldData data = null;
-        // TODO: Use an actual estimator to estimate before loading.
-        NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA));
-        if (terms == null) {
-            data = AbstractAtomicOrdinalsFieldData.empty();
-            estimator.afterLoad(null, data.ramBytesUsed());
-            return data;
-        }
-        PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton();
-        org.apache.lucene.util.fst.Builder<Long> fstBuilder = new org.apache.lucene.util.fst.Builder<>(INPUT_TYPE.BYTE1, outputs);
-        final IntsRefBuilder scratch = new IntsRefBuilder();
-
-        final long numTerms;
-        if (regex == null && frequency == null) {
-            numTerms = terms.size();
-        } else {
-            numTerms = -1;
-        }
-        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
-        boolean success = false;
-        try (OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio)) {
-            // we don't store an ord 0 in the FST since we could have an empty string in there and FST don't support
-            // empty strings twice. ie. them merge fails for long output.
-            TermsEnum termsEnum = filter(terms, reader);
-            PostingsEnum docsEnum = null;
-            for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
-                final long termOrd = builder.nextOrdinal();
-                fstBuilder.add(Util.toIntsRef(term, scratch), (long) termOrd);
-                docsEnum = termsEnum.postings(null, docsEnum, PostingsEnum.NONE);
-                for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
-                    builder.addDoc(docId);
-                }
-            }
-
-            FST<Long> fst = fstBuilder.finish();
-
-            final Ordinals ordinals = builder.build(fieldDataType.getSettings());
-
-            data = new FSTBytesAtomicFieldData(fst, ordinals);
-            success = true;
-            return data;
-        } finally {
-            if (success) {
-                estimator.afterLoad(null, data.ramBytesUsed());
-            }
-        }
-    }
-}

View File

@@ -1,169 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata.plain;
-
-import org.apache.lucene.index.DocValues;
-import org.apache.lucene.index.RandomAccessOrds;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
-import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.packed.PagedMutable;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.index.fielddata.FieldData;
-import org.elasticsearch.index.fielddata.GeoPointValues;
-import org.elasticsearch.index.fielddata.MultiGeoPointValues;
-import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Field data atomic impl for geo points with lossy compression.
- */
-public abstract class GeoPointCompressedAtomicFieldData extends AbstractAtomicGeoPointFieldData {
-
-    @Override
-    public void close() {
-    }
-
-    static class WithOrdinals extends GeoPointCompressedAtomicFieldData {
-
-        private final GeoPointFieldMapper.Encoding encoding;
-        private final PagedMutable lon, lat;
-        private final Ordinals ordinals;
-        private final int maxDoc;
-
-        public WithOrdinals(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, Ordinals ordinals, int maxDoc) {
-            super();
-            this.encoding = encoding;
-            this.lon = lon;
-            this.lat = lat;
-            this.ordinals = ordinals;
-            this.maxDoc = maxDoc;
-        }
-
-        @Override
-        public long ramBytesUsed() {
-            return RamUsageEstimator.NUM_BYTES_INT/*size*/ + lon.ramBytesUsed() + lat.ramBytesUsed();
-        }
-
-        @Override
-        public Collection<Accountable> getChildResources() {
-            List<Accountable> resources = new ArrayList<>();
-            resources.add(Accountables.namedAccountable("latitude", lat));
-            resources.add(Accountables.namedAccountable("longitude", lon));
-            return Collections.unmodifiableList(resources);
-        }
-
-        @Override
-        public MultiGeoPointValues getGeoPointValues() {
-            final RandomAccessOrds ords = ordinals.ordinals();
-            final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
-            if (singleOrds != null) {
-                final GeoPoint point = new GeoPoint();
-                final GeoPointValues values = new GeoPointValues() {
-                    @Override
-                    public GeoPoint get(int docID) {
-                        final int ord = singleOrds.getOrd(docID);
-                        if (ord >= 0) {
-                            encoding.decode(lat.get(ord), lon.get(ord), point);
-                        } else {
-                            point.reset(0, 0);
-                        }
-                        return point;
-                    }
-                };
-                return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc));
-            } else {
-                final GeoPoint point = new GeoPoint();
-                return new MultiGeoPointValues() {
-
-                    @Override
-                    public GeoPoint valueAt(int index) {
-                        final long ord = ords.ordAt(index);
-                        encoding.decode(lat.get(ord), lon.get(ord), point);
-                        return point;
-                    }
-
-                    @Override
-                    public void setDocument(int docId) {
-                        ords.setDocument(docId);
-                    }
-
-                    @Override
-                    public int count() {
-                        return ords.cardinality();
-                    }
-                };
-            }
-        }
-    }
-
-    /**
-     * Assumes unset values are marked in bitset, and docId is used as the index to the value array.
-     */
-    public static class Single extends GeoPointCompressedAtomicFieldData {
-
-        private final GeoPointFieldMapper.Encoding encoding;
-        private final PagedMutable lon, lat;
-        private final BitSet set;
-
-        public Single(GeoPointFieldMapper.Encoding encoding, PagedMutable lon, PagedMutable lat, BitSet set) {
-            super();
-            this.encoding = encoding;
-            this.lon = lon;
-            this.lat = lat;
-            this.set = set;
-        }
-
-        @Override
-        public long ramBytesUsed() {
-            return RamUsageEstimator.NUM_BYTES_INT/*size*/ + lon.ramBytesUsed() + lat.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed());
-        }
-
-        @Override
-        public Collection<Accountable> getChildResources() {
-            List<Accountable> resources = new ArrayList<>();
-            resources.add(Accountables.namedAccountable("latitude", lat));
-            resources.add(Accountables.namedAccountable("longitude", lon));
-            if (set != null) {
-                resources.add(Accountables.namedAccountable("missing bitset", set));
-            }
-            return Collections.unmodifiableList(resources);
-        }
-
-        @Override
-        public MultiGeoPointValues getGeoPointValues() {
-            final GeoPoint point = new GeoPoint();
-            final GeoPointValues values = new GeoPointValues() {
-                @Override
-                public GeoPoint get(int docID) {
-                    encoding.decode(lat.get(docID), lon.get(docID), point);
-                    return point;
-                }
-            };
-            return FieldData.singleton(values, set);
-        }
-    }
-}

View File

@ -1,157 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedMutable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
/**
*/
public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointFieldData {
private static final String PRECISION_KEY = "precision";
private static final Distance DEFAULT_PRECISION_VALUE = new Distance(1, DistanceUnit.CENTIMETERS);
private final CircuitBreakerService breakerService;
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
FieldDataType type = fieldType.fieldDataType();
final String precisionAsString = type.getSettings().get(PRECISION_KEY);
final Distance precision;
if (precisionAsString != null) {
precision = Distance.parseDistance(precisionAsString);
} else {
precision = DEFAULT_PRECISION_VALUE;
}
return new GeoPointCompressedIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, precision, breakerService);
}
}
private final GeoPointFieldMapper.Encoding encoding;
public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, Distance precision,
CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.encoding = GeoPointFieldMapper.Encoding.of(precision);
this.breakerService = breakerService;
}
@Override
public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
LeafReader reader = context.reader();
Terms terms = reader.terms(getFieldNames().indexName());
AtomicGeoPointFieldData data = null;
// TODO: Use an actual estimator to estimate before loading.
NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA));
if (terms == null) {
data = AbstractAtomicGeoPointFieldData.empty(reader.maxDoc());
estimator.afterLoad(null, data.ramBytesUsed());
return data;
}
final long initialSize;
if (terms.size() >= 0) {
initialSize = 1 + terms.size();
} else { // codec doesn't expose size
initialSize = 1 + Math.min(1 << 12, reader.maxDoc());
}
final int pageSize = Integer.highestOneBit(BigArrays.PAGE_SIZE_IN_BYTES * 8 / encoding.numBitsPerCoordinate() - 1) << 1;
PagedMutable lat = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
PagedMutable lon = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator()));
GeoPoint point;
while ((point = iter.next()) != null) {
final long ord = builder.currentOrdinal();
if (lat.size() <= ord) {
final long newSize = BigArrays.overSize(ord + 1);
lat = lat.resize(newSize);
lon = lon.resize(newSize);
}
lat.set(ord, encoding.encodeCoordinate(point.getLat()));
lon.set(ord, encoding.encodeCoordinate(point.getLon()));
}
Ordinals build = builder.build(fieldDataType.getSettings());
RandomAccessOrds ordinals = build.ordinals();
if (FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings.MemoryStorageFormat.ORDINALS) {
if (lat.size() != ordinals.getValueCount()) {
lat = lat.resize(ordinals.getValueCount());
lon = lon.resize(ordinals.getValueCount());
}
data = new GeoPointCompressedAtomicFieldData.WithOrdinals(encoding, lon, lat, build, reader.maxDoc());
} else {
int maxDoc = reader.maxDoc();
PagedMutable sLat = new PagedMutable(reader.maxDoc(), pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
PagedMutable sLon = new PagedMutable(reader.maxDoc(), pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
final long missing = encoding.encodeCoordinate(0);
for (int i = 0; i < maxDoc; i++) {
ordinals.setDocument(i);
final long nativeOrdinal = ordinals.nextOrd();
if (nativeOrdinal >= 0) {
sLat.set(i, lat.get(nativeOrdinal));
sLon.set(i, lon.get(nativeOrdinal));
} else {
sLat.set(i, missing);
sLon.set(i, missing);
}
}
BitSet set = builder.buildDocsWithValuesSet();
data = new GeoPointCompressedAtomicFieldData.Single(encoding, sLon, sLat, set);
}
success = true;
return data;
} finally {
if (success) {
estimator.afterLoad(null, data.ramBytesUsed());
}
}
}
}
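
The `loadDirect` above sizes each `PagedMutable` at `encoding.numBitsPerCoordinate()` bits per entry, so fewer bits means coarser coordinates and less memory. A standalone sketch of that quantization trade-off (illustrative only, not the real `GeoPointFieldMapper.Encoding`, which derives the bit count from a `Distance` precision):

```java
// Sketch: map a coordinate onto an n-bit integer grid and back.
public final class CoordinateQuantizer {
    private final int numBits;     // bits kept per coordinate (assumption: <= 62)
    private final double min, max; // value range, e.g. -180..180 for longitude

    public CoordinateQuantizer(int numBits, double min, double max) {
        this.numBits = numBits;
        this.min = min;
        this.max = max;
    }

    /** Encode a coordinate as an integer in [0, 2^numBits - 1]. */
    public long encode(double value) {
        double normalized = (value - min) / (max - min);        // 0..1
        return Math.round(normalized * ((1L << numBits) - 1));
    }

    /** Recover an approximation whose error shrinks as numBits grows. */
    public double decode(long encoded) {
        return min + (max - min) * encoded / (double) ((1L << numBits) - 1);
    }

    public static void main(String[] args) {
        CoordinateQuantizer lon = new CoordinateQuantizer(20, -180, 180);
        long bits = lon.encode(11.573);
        System.out.println(bits + " -> " + lon.decode(bits)); // ~11.573, within grid error
    }
}
```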


@@ -64,41 +64,26 @@ import static com.google.common.collect.Maps.newHashMapWithExpectedSize;
 /**
  */
-public class ShardGetService extends AbstractIndexShardComponent {
+public final class ShardGetService extends AbstractIndexShardComponent {
-    private final ScriptService scriptService;
     private final MapperService mapperService;
-    private final IndexFieldDataService fieldDataService;
-    private IndexShard indexShard;
     private final MeanMetric existsMetric = new MeanMetric();
     private final MeanMetric missingMetric = new MeanMetric();
     private final CounterMetric currentMetric = new CounterMetric();
+    private final IndexShard indexShard;

-    @Inject
-    public ShardGetService(ShardId shardId, @IndexSettings Settings indexSettings, ScriptService scriptService,
-                           MapperService mapperService, IndexFieldDataService fieldDataService) {
-        super(shardId, indexSettings);
-        this.scriptService = scriptService;
+    public ShardGetService(IndexShard indexShard,
+                           MapperService mapperService) {
+        super(indexShard.shardId(), indexShard.indexSettings());
         this.mapperService = mapperService;
-        this.fieldDataService = fieldDataService;
+        this.indexShard = indexShard;
     }

     public GetStats stats() {
         return new GetStats(existsMetric.count(), TimeUnit.NANOSECONDS.toMillis(existsMetric.sum()), missingMetric.count(), TimeUnit.NANOSECONDS.toMillis(missingMetric.sum()), currentMetric.count());
     }

-    // sadly, to overcome cyclic dep, we need to do this and inject it ourselves...
-    public ShardGetService setIndexShard(IndexShard indexShard) {
-        this.indexShard = indexShard;
-        return this;
-    }

-    public GetResult get(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields)
-    {
+    public GetResult get(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) {
         currentMetric.inc();
         try {
             long now = System.nanoTime();
@@ -151,7 +136,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
     /**
      * decides what needs to be done based on the request input and always returns a valid non-null FetchSourceContext
      */
-    protected FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSourceContext context, @Nullable String[] gFields) {
+    private FetchSourceContext normalizeFetchSourceContent(@Nullable FetchSourceContext context, @Nullable String[] gFields) {
         if (context != null) {
             return context;
         }
@@ -166,7 +151,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
         return FetchSourceContext.DO_NOT_FETCH_SOURCE;
     }

-    public GetResult innerGet(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) {
+    private GetResult innerGet(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) {
         fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields);
         boolean loadSource = (gFields != null && gFields.length > 0) || fetchSourceContext.fetchSource();
@@ -238,7 +223,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
                 value = source.source.length();
             } else {
                 if (searchLookup == null) {
-                    searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
+                    searchLookup = new SearchLookup(mapperService, null, new String[]{type});
                     searchLookup.source().setSource(source.source);
                 }
@@ -370,7 +355,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
                 }
             } else if (!fieldMapper.fieldType().stored() && !fieldMapper.isGenerated()) {
                 if (searchLookup == null) {
-                    searchLookup = new SearchLookup(mapperService, fieldDataService, new String[]{type});
+                    searchLookup = new SearchLookup(mapperService, null, new String[]{type});
                     LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(docIdAndVersion.context);
                     searchLookup.source().setSource(source);
                     leafSearchLookup.setDocument(docIdAndVersion.docId);
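
The setter-based cycle workaround is replaced by plain constructor injection. A minimal sketch of the pattern change (class and field names here are stand-ins, not the real types):

```java
import java.util.Objects;

// Before: the component was built first and the shard injected later,
// leaving a window where the field could be observed as null.
class SetterInjectedGetService {
    private Object indexShard; // mutable; callers had to remember setIndexShard()

    SetterInjectedGetService setIndexShard(Object indexShard) {
        this.indexShard = indexShard;
        return this;
    }
}

// After: the shard is a constructor argument, so the field is final and
// every instance is fully wired on construction.
class ConstructorInjectedGetService {
    private final Object indexShard; // no partially-wired instances possible

    ConstructorInjectedGetService(Object indexShard) {
        this.indexShard = Objects.requireNonNull(indexShard);
    }
}
```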


@@ -46,6 +46,7 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
 import java.io.IOException;
 import java.util.List;
+import java.util.Objects;

 /**
  * This defines the core properties and functions to operate on a field.
@@ -200,7 +201,29 @@ public class MappedFieldType extends FieldType {
         return new MappedFieldType(this);
     }

-    // norelease: we need to override freeze() and add safety checks that all settings are actually set
+    @Override
+    public boolean equals(Object o) {
+        if (!super.equals(o)) return false;
+        MappedFieldType fieldType = (MappedFieldType) o;
+        return boost == fieldType.boost &&
+            docValues == fieldType.docValues &&
+            Objects.equals(names, fieldType.names) &&
+            Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
+            Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
+            Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
+            Objects.equals(similarity, fieldType.similarity) &&
+            Objects.equals(normsLoading, fieldType.normsLoading) &&
+            Objects.equals(fieldDataType, fieldType.fieldDataType) &&
+            Objects.equals(nullValue, fieldType.nullValue) &&
+            Objects.equals(nullValueAsString, fieldType.nullValueAsString);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, similarity, normsLoading, fieldDataType, nullValue, nullValueAsString);
+    }
+
+    // norelease: we need to override freeze() and add safety checks that all settings are actually set
     public boolean isNumeric() {
         return false;
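
The equals/hashCode pattern above chains to `super.equals` and compares every setting, and the field-type subclasses in this commit all extend it the same way, so two field types compare equal only if every level of the hierarchy agrees. A hedged standalone sketch of that layering (made-up class and field names):

```java
import java.util.Objects;

class BaseType {
    protected float boost = 1.0f;

    @Override public boolean equals(Object o) {
        if (!(o instanceof BaseType)) return false;
        return boost == ((BaseType) o).boost;
    }

    @Override public int hashCode() { return Objects.hash(boost); }
}

class DateType extends BaseType {
    protected String formatPattern = "dateOptionalTime";

    @Override public boolean equals(Object o) {
        if (!super.equals(o)) return false;         // base settings must match first
        if (!(o instanceof DateType)) return false; // then subclass settings
        return Objects.equals(formatPattern, ((DateType) o).formatPattern);
    }

    @Override public int hashCode() { return Objects.hash(super.hashCode(), formatPattern); }
}
```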


@@ -532,35 +532,26 @@ public class MapperService extends AbstractIndexComponent {
         return fields;
     }

-    public SmartNameObjectMapper smartNameObjectMapper(String smartName, @Nullable String[] types) {
+    public ObjectMapper getObjectMapper(String name, @Nullable String[] types) {
         if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
-            ObjectMappers mappers = fullPathObjectMappers.get(smartName);
+            ObjectMappers mappers = fullPathObjectMappers.get(name);
             if (mappers != null) {
-                return new SmartNameObjectMapper(mappers.mapper(), guessDocMapper(smartName));
+                return mappers.mapper();
             }
             return null;
         }
         for (String type : types) {
             DocumentMapper possibleDocMapper = mappers.get(type);
             if (possibleDocMapper != null) {
-                ObjectMapper mapper = possibleDocMapper.objectMappers().get(smartName);
+                ObjectMapper mapper = possibleDocMapper.objectMappers().get(name);
                 if (mapper != null) {
-                    return new SmartNameObjectMapper(mapper, possibleDocMapper);
+                    return mapper;
                 }
             }
         }
         return null;
     }

-    private DocumentMapper guessDocMapper(String path) {
-        for (DocumentMapper documentMapper : docMappers(false)) {
-            if (documentMapper.objectMappers().containsKey(path)) {
-                return documentMapper;
-            }
-        }
-        return null;
-    }

     public MappedFieldType smartNameFieldType(String smartName) {
         MappedFieldType fieldType = fullName(smartName);
         if (fieldType != null) {
@@ -663,32 +654,6 @@ public class MapperService extends AbstractIndexComponent {
         return META_FIELDS.contains(fieldName);
     }

-    public static class SmartNameObjectMapper {
-
-        private final ObjectMapper mapper;
-        private final DocumentMapper docMapper;
-
-        public SmartNameObjectMapper(ObjectMapper mapper, @Nullable DocumentMapper docMapper) {
-            this.mapper = mapper;
-            this.docMapper = docMapper;
-        }
-
-        public boolean hasMapper() {
-            return mapper != null;
-        }
-
-        public ObjectMapper mapper() {
-            return mapper;
-        }
-
-        public boolean hasDocMapper() {
-            return docMapper != null;
-        }
-
-        public DocumentMapper docMapper() {
-            return docMapper;
-        }
-    }

     final class SmartIndexNameSearchAnalyzer extends DelegatingAnalyzerWrapper {

         private final Analyzer defaultAnalyzer;
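
With the `SmartNameObjectMapper` wrapper gone, a plain null return now means "no object mapper at that path". A toy model of the simplified call site (names and map contents are illustrative only):

```java
import java.util.HashMap;
import java.util.Map;

// Sketch: single-step lookup replacing the old smart != null && smart.hasMapper() dance.
class ToyMapperService {
    private final Map<String, Object> fullPathObjectMappers = new HashMap<>();

    ToyMapperService() { fullPathObjectMappers.put("user.address", new Object()); }

    /** Mirrors the "_all" branch of getObjectMapper(name, types) above. */
    Object getObjectMapper(String name) { return fullPathObjectMappers.get(name); }

    public static void main(String[] args) {
        ToyMapperService mappers = new ToyMapperService();
        Object mapper = mappers.getObjectMapper("user.address");
        if (mapper != null) { // old code needed two checks on the wrapper
            System.out.println("found nested object, expand pattern to user.address.*");
        }
    }
}
```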


@@ -49,6 +49,7 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;

 import static org.elasticsearch.index.mapper.MapperBuilders.binaryField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
@@ -82,7 +83,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
         @Override
         public BinaryFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            ((BinaryFieldType)fieldType).tryUncompressing = context.indexCreatedVersion().before(Version.V_2_0_0);
+            ((BinaryFieldType)fieldType).setTryUncompressing(context.indexCreatedVersion().before(Version.V_2_0_0));
             return new BinaryFieldMapper(fieldType, docValues,
                     fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
@@ -106,7 +107,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
     }

     static final class BinaryFieldType extends MappedFieldType {
-        protected boolean tryUncompressing = false;
+        private boolean tryUncompressing = false;

         public BinaryFieldType() {
             super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@@ -122,6 +123,27 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
             return new BinaryFieldType(this);
         }

+        @Override
+        public boolean equals(Object o) {
+            if (!super.equals(o)) return false;
+            BinaryFieldType that = (BinaryFieldType) o;
+            return Objects.equals(tryUncompressing, that.tryUncompressing);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), tryUncompressing);
+        }
+
+        public boolean tryUncompressing() {
+            return tryUncompressing;
+        }
+
+        public void setTryUncompressing(boolean tryUncompressing) {
+            checkIfFrozen();
+            this.tryUncompressing = tryUncompressing;
+        }
+
         @Override
         public BytesReference value(Object value) {
             if (value == null) {
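
The direct field write becomes a setter that calls `checkIfFrozen()`, part of making field types immutable once mapping parsing finishes. A minimal runnable sketch of that guard (the class here is illustrative, not the real `MappedFieldType`):

```java
class FreezableSetting {
    private boolean frozen = false;
    private boolean tryUncompressing = false;

    void freeze() { this.frozen = true; }

    private void checkIfFrozen() {
        if (frozen) {
            throw new IllegalStateException("already frozen, cannot change settings");
        }
    }

    void setTryUncompressing(boolean value) {
        checkIfFrozen(); // mutation allowed only before freeze()
        this.tryUncompressing = value;
    }

    public static void main(String[] args) {
        FreezableSetting s = new FreezableSetting();
        s.setTryUncompressing(true); // fine: not frozen yet
        s.freeze();
        try {
            s.setTryUncompressing(false);
        } catch (IllegalStateException e) {
            System.out.println("rejected after freeze: " + e.getMessage());
        }
    }
}
```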


@@ -57,6 +57,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
@@ -234,6 +235,19 @@ public class DateFieldMapper extends NumberFieldMapper {
             return new DateFieldType(this);
         }

+        @Override
+        public boolean equals(Object o) {
+            if (!super.equals(o)) return false;
+            DateFieldType that = (DateFieldType) o;
+            return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) &&
+                   Objects.equals(timeUnit, that.timeUnit);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit);
+        }
+
         public FormatDateTimeFormatter dateTimeFormatter() {
             return dateTimeFormatter;
         }


@@ -312,6 +312,26 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
             return new GeoPointFieldType(this);
         }

+        @Override
+        public boolean equals(Object o) {
+            if (!super.equals(o)) return false;
+            GeoPointFieldType that = (GeoPointFieldType) o;
+            return geohashPrecision == that.geohashPrecision &&
+                geohashPrefixEnabled == that.geohashPrefixEnabled &&
+                validateLon == that.validateLon &&
+                validateLat == that.validateLat &&
+                normalizeLon == that.normalizeLon &&
+                normalizeLat == that.normalizeLat &&
+                java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
+                java.util.Objects.equals(latFieldType, that.latFieldType) &&
+                java.util.Objects.equals(lonFieldType, that.lonFieldType);
+        }
+
+        @Override
+        public int hashCode() {
+            return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, lonFieldType, validateLon, validateLat, normalizeLon, normalizeLat);
+        }
+
         public boolean isGeohashEnabled() {
             return geohashFieldType != null;
         }


@@ -51,6 +51,7 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;

 import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField;
@@ -59,7 +60,7 @@ import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField;
  * FieldMapper for indexing {@link com.spatial4j.core.shape.Shape}s.
  * <p/>
  * Currently Shapes can only be indexed and can only be queried using
- * {@link org.elasticsearch.index.query.GeoShapeFilterParser}, consequently
+ * {@link org.elasticsearch.index.query.GeoShapeQueryParser}, consequently
  * a lot of behavior in this Mapper is disabled.
  * <p/>
  * Format supported:
@@ -91,12 +92,15 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         public static final String STRATEGY = SpatialStrategy.RECURSIVE.getStrategyName();
         public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m");
         public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m");
-        public static final double DISTANCE_ERROR_PCT = 0.025d;
+        public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
         public static final Orientation ORIENTATION = Orientation.RIGHT;

         public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();

         static {
+            // setting name here is a hack so freeze can be called...instead all these options should be
+            // moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers...
+            FIELD_TYPE.setNames(new MappedFieldType.Names("DoesNotExist"));
             FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
             FIELD_TYPE.setTokenized(false);
             FIELD_TYPE.setStored(false);
@@ -108,91 +112,30 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
     public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoShapeFieldMapper> {

-        private String tree = Defaults.TREE;
-        private String strategyName = Defaults.STRATEGY;
-        private int treeLevels = 0;
-        private double precisionInMeters = -1;
-        private double distanceErrorPct = Defaults.DISTANCE_ERROR_PCT;
-        private boolean distErrPctDefined;
-        private Orientation orientation = Defaults.ORIENTATION;
-
-        private SpatialPrefixTree prefixTree;

         public Builder(String name) {
             super(name, Defaults.FIELD_TYPE);
         }

-        public Builder tree(String tree) {
-            this.tree = tree;
-            return this;
-        }
-
-        public Builder strategy(String strategy) {
-            this.strategyName = strategy;
-            return this;
-        }
-
-        public Builder treeLevelsByDistance(double meters) {
-            this.precisionInMeters = meters;
-            return this;
-        }
-
-        public Builder treeLevels(int treeLevels) {
-            this.treeLevels = treeLevels;
-            return this;
-        }
-
-        public Builder distanceErrorPct(double distanceErrorPct) {
-            this.distanceErrorPct = distanceErrorPct;
-            return this;
-        }
-
-        public Builder orientation(Orientation orientation) {
-            this.orientation = orientation;
-            return this;
-        }
+        public GeoShapeFieldType fieldType() {
+            return (GeoShapeFieldType)fieldType;
+        }

         @Override
         public GeoShapeFieldMapper build(BuilderContext context) {
+            GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
-            if (Names.TREE_GEOHASH.equals(tree)) {
-                prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
-            } else if (Names.TREE_QUADTREE.equals(tree)) {
-                if (context.indexCreatedVersion().before(Version.V_1_6_0)) {
-                    prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
-                } else {
-                    prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
-                }
-            } else {
-                throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
+            if (geoShapeFieldType.tree.equals("quadtree") && context.indexCreatedVersion().before(Version.V_2_0_0)) {
+                geoShapeFieldType.setTree("legacyquadtree");
             }
+            if (context.indexCreatedVersion().before(Version.V_2_0_0) ||
+                    (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) {
+                geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT);
+            }
             setupFieldType(context);
-            RecursivePrefixTreeStrategy recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, fieldType.names().indexName());
-            recursiveStrategy.setDistErrPct(distanceErrorPct);
-            recursiveStrategy.setPruneLeafyBranches(false);
-            TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, fieldType.names().indexName());
-            termStrategy.setDistErrPct(distanceErrorPct);
-            GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
-            geoShapeFieldType.setStrategies(strategyName, recursiveStrategy, termStrategy);
-            geoShapeFieldType.setOrientation(orientation);
             return new GeoShapeFieldMapper(fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }

-        private final int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
-            if (treeLevels > 0 || precisionInMeters >= 0) {
-                // if the user specified a precision but not a distance error percent then zero out the distance err pct
-                // this is done to guarantee precision specified by the user without doing something unexpected under the covers
-                if (!distErrPctDefined) distanceErrorPct = 0;
-                return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? GeoUtils.geoHashLevelsForPrecision(precisionInMeters)
-                        : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0);
-            }
-            return defaultLevels;
-        }
     }

     public static class TypeParser implements Mapper.TypeParser {
@@ -200,31 +143,27 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             Builder builder = geoShapeField(name);
-            // if index was created before 1.6, this conditional should be true (this forces any index created on/or after 1.6 to use 0 for
-            // the default distanceErrorPct parameter).
-            builder.distErrPctDefined = parserContext.indexVersionCreated().before(Version.V_1_6_0);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());
                 Object fieldNode = entry.getValue();
                 if (Names.TREE.equals(fieldName)) {
-                    builder.tree(fieldNode.toString());
+                    builder.fieldType().setTree(fieldNode.toString());
                     iterator.remove();
                 } else if (Names.TREE_LEVELS.equals(fieldName)) {
-                    builder.treeLevels(Integer.parseInt(fieldNode.toString()));
+                    builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString()));
                     iterator.remove();
                 } else if (Names.TREE_PRESISION.equals(fieldName)) {
-                    builder.treeLevelsByDistance(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
+                    builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
                     iterator.remove();
                 } else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
-                    builder.distanceErrorPct(Double.parseDouble(fieldNode.toString()));
-                    builder.distErrPctDefined = true;
+                    builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString()));
                     iterator.remove();
                 } else if (Names.ORIENTATION.equals(fieldName)) {
-                    builder.orientation(ShapeBuilder.orientationFromString(fieldNode.toString()));
+                    builder.fieldType().setOrientation(ShapeBuilder.orientationFromString(fieldNode.toString()));
                     iterator.remove();
                 } else if (Names.STRATEGY.equals(fieldName)) {
-                    builder.strategy(fieldNode.toString());
+                    builder.fieldType().setStrategyName(fieldNode.toString());
                     iterator.remove();
                 }
             }
@@ -234,10 +173,18 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
     public static final class GeoShapeFieldType extends MappedFieldType {

+        private String tree = Defaults.TREE;
+        private String strategyName = Defaults.STRATEGY;
+        private int treeLevels = 0;
+        private double precisionInMeters = -1;
+        private Double distanceErrorPct;
+        private double defaultDistanceErrorPct = 0.0;
+        private Orientation orientation = Defaults.ORIENTATION;
+
+        // these are built when the field type is frozen
         private PrefixTreeStrategy defaultStrategy;
         private RecursivePrefixTreeStrategy recursiveStrategy;
         private TermQueryPrefixTreeStrategy termStrategy;
-        private Orientation orientation;

         public GeoShapeFieldType() {
             super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@@ -245,10 +192,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         protected GeoShapeFieldType(GeoShapeFieldType ref) {
             super(ref);
-            // TODO: this shallow copy is probably not good...need to extract the parameters and recreate the tree and strategies?
-            this.defaultStrategy = ref.defaultStrategy;
-            this.recursiveStrategy = ref.recursiveStrategy;
-            this.termStrategy = ref.termStrategy;
+            this.tree = ref.tree;
+            this.strategyName = ref.strategyName;
+            this.treeLevels = ref.treeLevels;
+            this.precisionInMeters = ref.precisionInMeters;
+            this.distanceErrorPct = ref.distanceErrorPct;
+            this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct;
             this.orientation = ref.orientation;
         }
@@ -257,6 +206,113 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
             return new GeoShapeFieldType(this);
         }

+        @Override
+        public boolean equals(Object o) {
+            if (!super.equals(o)) return false;
+            GeoShapeFieldType that = (GeoShapeFieldType) o;
+            return treeLevels == that.treeLevels &&
+                precisionInMeters == that.precisionInMeters &&
+                defaultDistanceErrorPct == that.defaultDistanceErrorPct &&
+                Objects.equals(tree, that.tree) &&
+                Objects.equals(strategyName, that.strategyName) &&
+                Objects.equals(distanceErrorPct, that.distanceErrorPct) &&
+                orientation == that.orientation;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), tree, strategyName, treeLevels, precisionInMeters, distanceErrorPct, defaultDistanceErrorPct, orientation);
+        }
+
+        @Override
+        public void freeze() {
+            super.freeze();
+            // This is a bit hackish: we need to setup the spatial tree and strategies once the field name is set, which
+            // must be by the time freeze is called.
+            SpatialPrefixTree prefixTree;
+            if ("geohash".equals(tree)) {
+                prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
+            } else if ("legacyquadtree".equals(tree)) {
+                prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
+            } else if ("quadtree".equals(tree)) {
+                prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
+            } else {
+                throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
+            }
+            recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, names().indexName());
+            recursiveStrategy.setDistErrPct(distanceErrorPct());
+            recursiveStrategy.setPruneLeafyBranches(false);
+            termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, names().indexName());
+            termStrategy.setDistErrPct(distanceErrorPct());
+            defaultStrategy = resolveStrategy(strategyName);
+        }
+
+        private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
+            if (treeLevels > 0 || precisionInMeters >= 0) {
+                return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? GeoUtils.geoHashLevelsForPrecision(precisionInMeters)
+                        : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0);
+            }
+            return defaultLevels;
+        }
+
+        public String tree() {
+            return tree;
+        }
+
+        public void setTree(String tree) {
+            checkIfFrozen();
+            this.tree = tree;
+        }
+
+        public String strategyName() {
+            return strategyName;
+        }
+
+        public void setStrategyName(String strategyName) {
+            checkIfFrozen();
+            this.strategyName = strategyName;
+        }
+
+        public int treeLevels() {
+            return treeLevels;
+        }
+
+        public void setTreeLevels(int treeLevels) {
+            checkIfFrozen();
+            this.treeLevels = treeLevels;
+        }
+
+        public double precisionInMeters() {
+            return precisionInMeters;
+        }
+
+        public void setPrecisionInMeters(double precisionInMeters) {
+            checkIfFrozen();
+            this.precisionInMeters = precisionInMeters;
+        }
+
+        public double distanceErrorPct() {
+            return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct;
+        }
+
+        public void setDistanceErrorPct(double distanceErrorPct) {
+            checkIfFrozen();
+            this.distanceErrorPct = distanceErrorPct;
+        }
+
+        public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) {
+            checkIfFrozen();
+            this.defaultDistanceErrorPct = defaultDistanceErrorPct;
+        }
+
+        public Orientation orientation() { return this.orientation; }
+
+        public void setOrientation(Orientation orientation) {
+            checkIfFrozen();
+            this.orientation = orientation;
+        }
+
         public PrefixTreeStrategy defaultStrategy() {
             return this.defaultStrategy;
         }
@@ -271,26 +327,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
             throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
         }

-        public void setStrategies(String defaultStrategy, RecursivePrefixTreeStrategy recursiveStrategy, TermQueryPrefixTreeStrategy termStrategy) {
-            checkIfFrozen();
-            this.recursiveStrategy = recursiveStrategy;
-            this.termStrategy = termStrategy;
-            this.defaultStrategy = resolveStrategy(defaultStrategy);
-        }
-
-        public void setDistErrPct(double distErrPct) {
-            checkIfFrozen();
-            this.recursiveStrategy.setDistErrPct(distErrPct);
-            this.termStrategy.setDistErrPct(distErrPct);
-        }
-
-        public Orientation orientation() { return this.orientation; }
-
-        public void setOrientation(Orientation orientation) {
-            checkIfFrozen();
-            this.orientation = orientation;
-        }

         @Override
         public String value(Object value) {
             throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values");
@@ -352,25 +388,24 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
             return;
         }
         final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;
-        final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.fieldType().defaultStrategy();

         // prevent user from changing strategies
-        if (!(this.fieldType().defaultStrategy().getClass().equals(mergeWithStrategy.getClass()))) {
+        if (fieldType().strategyName().equals(fieldMergeWith.fieldType().strategyName()) == false) {
             mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different strategy");
         }

-        final SpatialPrefixTree grid = this.fieldType().defaultStrategy().getGrid();
-        final SpatialPrefixTree mergeGrid = mergeWithStrategy.getGrid();
-
         // prevent user from changing trees (changes encoding)
-        if (!grid.getClass().equals(mergeGrid.getClass())) {
+        if (fieldType().tree().equals(fieldMergeWith.fieldType().tree()) == false) {
             mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree");
         }

         // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
         // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
-        if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) {
-            mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels or precision");
+        if (fieldType().treeLevels() != fieldMergeWith.fieldType().treeLevels()) {
+            mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels");
+        }
+        if (fieldType().precisionInMeters() != fieldMergeWith.fieldType().precisionInMeters()) {
+            mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision");
         }

         // bail if there were merge conflicts
@@ -380,7 +415,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         // change distance error percent
         this.fieldType = this.fieldType.clone();
-        this.fieldType().setDistErrPct(mergeWithStrategy.getDistErrPct());
+        this.fieldType().setDistanceErrorPct(fieldMergeWith.fieldType().distanceErrorPct());
         // change orientation - this is allowed because existing dateline spanning shapes
         // have already been unwound and segmented
         this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation());
@@ -395,24 +430,21 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
     protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
         builder.field("type", contentType());

-        // TODO: Come up with a better way to get the name, maybe pass it from builder
-        if (fieldType().defaultStrategy().getGrid() instanceof GeohashPrefixTree) {
-            // Don't emit the tree name since GeohashPrefixTree is the default
-            // Only emit the tree levels if it isn't the default value
-            if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) {
-                builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels());
-            }
-        } else {
-            builder.field(Names.TREE, Names.TREE_QUADTREE);
-            if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) {
-                builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels());
-            }
+        if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) {
+            builder.field(Names.TREE, fieldType().tree());
         }
-
-        if (includeDefaults || fieldType().defaultStrategy().getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) {
-            builder.field(Names.DISTANCE_ERROR_PCT, fieldType().defaultStrategy().getDistErrPct());
+        if (includeDefaults || fieldType().treeLevels() != 0) {
+            builder.field(Names.TREE_LEVELS, fieldType().treeLevels());
+        }
+        if (includeDefaults || fieldType().precisionInMeters() != -1) {
+            builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters()));
+        }
+        if (includeDefaults || fieldType().strategyName() != Defaults.STRATEGY) {
+            builder.field(Names.STRATEGY, fieldType().strategyName());
+        }
+        if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) {
+            builder.field(Names.DISTANCE_ERROR_PCT, fieldType().distanceErrorPct());
         }
         if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) {
             builder.field(Names.ORIENTATION, fieldType().orientation());
         }
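
The key move in this file is that `freeze()` now derives the prefix tree and strategies from the declarative settings, so the copy constructor only copies raw settings instead of sharing built strategy objects. A standalone sketch of that build-on-freeze idea (class, field names, and the derived strings are illustrative only):

```java
class TreeConfig {
    private boolean frozen = false;
    private String tree = "geohash"; // raw, user-visible setting
    private String derivedTree;      // derived state, built once on freeze

    void setTree(String tree) {
        if (frozen) throw new IllegalStateException("frozen");
        this.tree = tree;
    }

    void freeze() {
        frozen = true;
        // derive compound state only after all raw settings are final
        if ("geohash".equals(tree)) {
            derivedTree = "GeohashPrefixTree";
        } else if ("quadtree".equals(tree)) {
            derivedTree = "PackedQuadPrefixTree";
        } else if ("legacyquadtree".equals(tree)) {
            derivedTree = "QuadPrefixTree";
        } else {
            throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
        }
    }

    String derivedTree() { return derivedTree; }
}
```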


@@ -46,6 +46,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;

 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.index.mapper.MapperBuilders.fieldNames;
@@ -148,6 +149,18 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
             this.enabled = ref.enabled;
         }

+        @Override
+        public boolean equals(Object o) {
+            if (!super.equals(o)) return false;
+            FieldNamesFieldType that = (FieldNamesFieldType) o;
+            return enabled == that.enabled;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(super.hashCode(), enabled);
+        }
+
         public void setEnabled(boolean enabled) {
             checkIfFrozen();
             this.enabled = enabled;
@@ -288,7 +301,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
         }
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

-        if (includeDefaults == false && fieldType().equals(Defaults.FIELD_TYPE) && fieldType().isEnabled() == Defaults.ENABLED) {
+        if (includeDefaults == false && fieldType().isEnabled() == Defaults.ENABLED) {
             return builder;
         }


@@ -229,12 +229,10 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
     }

     private final String type;
-    private final BytesRef typeAsBytes;

     protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) {
         super(fieldType, Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0), fieldDataSettings, indexSettings);
         this.type = type;
-        this.typeAsBytes = type == null ? null : new BytesRef(type);
     }

     public ParentFieldMapper(Settings indexSettings) {


@@ -19,18 +19,14 @@
 package org.elasticsearch.index.query;

-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.*;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.object.ObjectMapper;

 import java.io.IOException;
 import java.util.Collection;
@@ -98,8 +94,8 @@ public class ExistsQueryBuilder extends QueryBuilder<ExistsQueryBuilder> {
             return Queries.newMatchNoDocsQuery();
         }

-        MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
-        if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
+        ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
+        if (objectMapper != null) {
             // automatic make the object mapper pattern
             fieldPattern = fieldPattern + ".*";
         }
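
The behavior being preserved here: when the name resolves to an object mapper, the exists/missing check widens to all of its sub-fields. A standalone sketch of that expansion (`isObjectPath` stands in for `parseContext.getObjectMapper(...) != null`; names are illustrative):

```java
public class FieldPatternExpansion {
    static String expand(String fieldPattern, boolean isObjectPath) {
        if (isObjectPath) {
            return fieldPattern + ".*"; // e.g. "person" -> "person.*"
        }
        return fieldPattern;
    }

    public static void main(String[] args) {
        System.out.println(expand("person", true));        // person.*
        System.out.println(expand("person.name", false));  // person.name
    }
}
```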


@@ -19,14 +19,17 @@
 package org.elasticsearch.index.query;

-import org.apache.lucene.search.*;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermRangeQuery;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.object.ObjectMapper;

 import java.io.IOException;
 import java.util.Collection;
@@ -95,8 +98,8 @@ public class MissingQueryParser extends BaseQueryParserTemp {
             return Queries.newMatchNoDocsQuery();
         }

-        MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
-        if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
+        ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
+        if (objectMapper != null) {
             // automatic make the object mapper pattern
             fieldPattern = fieldPattern + ".*";
         }


@@ -608,17 +608,6 @@ public abstract class QueryBuilders {
         return new ScriptQueryBuilder(script);
     }

-    /**
-     * A builder for filter based on a script.
-     *
-     * @param script
-     *            The script to filter by.
-     * @deprecated Use {@link #scriptQuery(Script)} instead.
-     */
-    @Deprecated
-    public static ScriptQueryBuilder scriptQuery(String script) {
-        return new ScriptQueryBuilder(script);
-    }

     /**
      * A filter to filter based on a specific distance from a specific geo location / point.
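
With the deprecated String overload removed, callers wrap the source in a `Script` object (the `new Script(String)` constructor is the one this branch's own diffs use below in ScoreFunctionBuilders). Usage after the change; the script source is illustrative:

```java
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.Script;

public class ScriptQueryExample {
    public static void main(String[] args) {
        // old (removed): QueryBuilders.scriptQuery("doc['num'].value > 5")
        QueryBuilders.scriptQuery(new Script("doc['num'].value > 5"));
    }
}
```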


@@ -40,6 +40,7 @@ import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.support.NestedScope;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
@@ -329,8 +330,8 @@ public class QueryParseContext {
         return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
     }

-    public MapperService.SmartNameObjectMapper smartObjectMapper(String name) {
-        return indexQueryParser.mapperService.smartNameObjectMapper(name, getTypes());
+    public ObjectMapper getObjectMapper(String name) {
+        return indexQueryParser.mapperService.getObjectMapper(name, getTypes());
     }

     /** Gets the search analyzer for the given field, or the default if there is none present for the field


@@ -29,71 +29,18 @@ import java.util.Map;
 public class ScriptQueryBuilder extends QueryBuilder {

-    private Script script;
-
     public static final String NAME = "script";

-    @Deprecated
-    private String scriptString;
-
-    @Deprecated
-    private Map<String, Object> params;
-
-    @Deprecated
-    private String lang;
-
-    private String queryName;
-
     static final ScriptQueryBuilder PROTOTYPE = new ScriptQueryBuilder((Script) null);

+    private Script script;
+
+    private String queryName;
+
     public ScriptQueryBuilder(Script script) {
         this.script = script;
     }

-    /**
-     * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
-     */
-    @Deprecated
-    public ScriptQueryBuilder(String script) {
-        this.scriptString = script;
-    }
-
-    /**
-     * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
-     */
-    @Deprecated
-    public ScriptQueryBuilder addParam(String name, Object value) {
-        if (params == null) {
-            params = new HashMap<>();
-        }
-        params.put(name, value);
-        return this;
-    }
-
-    /**
-     * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
-     */
-    @Deprecated
-    public ScriptQueryBuilder params(Map<String, Object> params) {
-        if (this.params == null) {
-            this.params = params;
-        } else {
-            this.params.putAll(params);
-        }
-        return this;
-    }
-
-    /**
-     * Sets the script language.
-     *
-     * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
-     */
-    @Deprecated
-    public ScriptQueryBuilder lang(String lang) {
-        this.lang = lang;
-        return this;
-    }
-
     /**
      * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
      */
@@ -104,22 +51,8 @@ public class ScriptQueryBuilder extends QueryBuilder {
     @Override
     protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException {
         builder.startObject(NAME);
-        if (script != null) {
-            builder.field(ScriptField.SCRIPT.getPreferredName(), script);
-        } else {
-            if (this.scriptString != null) {
-                builder.field("script", scriptString);
-            }
-            if (this.params != null) {
-                builder.field("params", this.params);
-            }
-            if (this.lang != null) {
-                builder.field("lang", lang);
-            }
-        }
+        builder.field(ScriptField.SCRIPT.getPreferredName(), script);
         if (queryName != null) {
             builder.field("_name", queryName);
         }


@@ -58,44 +58,11 @@ public class ScoreFunctionBuilders {
     }

     public static ScriptScoreFunctionBuilder scriptFunction(Script script) {
-        return (new ScriptScoreFunctionBuilder()).script(script);
+        return (new ScriptScoreFunctionBuilder(script));
     }

-    /**
-     * @deprecated Use {@link #scriptFunction(Script)} instead.
-     */
-    @Deprecated
     public static ScriptScoreFunctionBuilder scriptFunction(String script) {
-        return (new ScriptScoreFunctionBuilder()).script(script);
+        return (new ScriptScoreFunctionBuilder(new Script(script)));
     }

-    /**
-     * @deprecated Use {@link #scriptFunction(Script)} instead.
-     */
-    @Deprecated
-    public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang) {
-        return (new ScriptScoreFunctionBuilder()).script(script).lang(lang);
-    }
-
-    /**
-     * @deprecated Use {@link #scriptFunction(Script)} instead.
-     */
-    @Deprecated
-    public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang, Map<String, Object> params) {
-        return (new ScriptScoreFunctionBuilder()).script(script).lang(lang).params(params);
-    }
-
-    /**
-     * @deprecated Use {@link #scriptFunction(Script)} instead.
-     */
-    @Deprecated
-    public static ScriptScoreFunctionBuilder scriptFunction(String script, Map<String, Object> params) {
-        return (new ScriptScoreFunctionBuilder()).script(script).params(params);
-    }
-
-    @Deprecated
-    public static FactorBuilder factorFunction(float boost) {
-        return (new FactorBuilder()).boostFactor(boost);
-    }

     public static RandomScoreFunctionBuilder randomFunction(int seed) {
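
The surviving String overload now just wraps its argument in a `Script`, so both calls below build the same function and the `Script` form is the non-deprecated entry point. Usage against this branch's API; the script source is illustrative:

```java
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.script.Script;

public class ScriptFunctionExample {
    public static void main(String[] args) {
        ScoreFunctionBuilders.scriptFunction(new Script("_score * doc['boost'].value"));
        ScoreFunctionBuilders.scriptFunction("_score * doc['boost'].value"); // delegates to the above
    }
}
```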


@@ -34,85 +34,19 @@ import java.util.Map;
  */
 public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder {

-    private Script script;
-
-    private String scriptString;
-
-    private String lang;
-
-    private Map<String, Object> params = null;
+    private final Script script;

-    public ScriptScoreFunctionBuilder() {
+    public ScriptScoreFunctionBuilder(Script script) {
+        if (script == null) {
+            throw new IllegalArgumentException("script must not be null");
+        }
+        this.script = script;
     }

-    public ScriptScoreFunctionBuilder script(Script script) {
-        this.script = script;
-        return this;
-    }
-
-    /**
-     * @deprecated Use {@link #script(Script)} instead
-     */
-    @Deprecated
-    public ScriptScoreFunctionBuilder script(String script) {
-        this.scriptString = script;
-        return this;
-    }
-
-    /**
-     * Sets the language of the script.@deprecated Use {@link #script(Script)}
-     * instead
-     */
-    @Deprecated
-    public ScriptScoreFunctionBuilder lang(String lang) {
-        this.lang = lang;
-        return this;
-    }
-
-    /**
-     * Additional parameters that can be provided to the script.@deprecated Use
-     * {@link #script(Script)} instead
-     */
-    @Deprecated
-    public ScriptScoreFunctionBuilder params(Map<String, Object> params) {
-        if (this.params == null) {
-            this.params = params;
-        } else {
-            this.params.putAll(params);
-        }
-        return this;
-    }
-
-    /**
-     * Additional parameters that can be provided to the script.@deprecated Use
-     * {@link #script(Script)} instead
-     */
-    @Deprecated
-    public ScriptScoreFunctionBuilder param(String key, Object value) {
-        if (params == null) {
-            params = new HashMap<>();
-        }
-        params.put(key, value);
-        return this;
-    }

     @Override
     public void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(getName());
-        if (script != null) {
-            builder.field(ScriptField.SCRIPT.getPreferredName(), script);
-        } else {
-            if (scriptString != null) {
-                builder.field("script", scriptString);
-            }
-            if (lang != null) {
-                builder.field("lang", lang);
-            }
-            if (this.params != null) {
-                builder.field("params", this.params);
-            }
-        }
+        builder.field(ScriptField.SCRIPT.getPreferredName(), script);
         builder.endObject();
     }
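
The pattern introduced here is fail-fast construction: a required collaborator is validated in the constructor and stored in a final field, so a half-built instance (null script plus deprecated string fallbacks) can no longer exist. A minimal sketch, with a String standing in for the `Script` type:

```java
final class RequiresScript {
    private final String script; // stand-in for org.elasticsearch.script.Script

    RequiresScript(String script) {
        if (script == null) {
            throw new IllegalArgumentException("script must not be null");
        }
        this.script = script; // every instance is valid from here on
    }

    String script() { return script; }
}
```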


@@ -154,60 +154,6 @@ public abstract class BaseInnerHitBuilder<T extends BaseInnerHitBuilder> impleme
         return (T) this;
     }

-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param script
-     *            The script to use
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public T addScriptField(String name, String script) {
-        sourceBuilder().scriptField(name, script);
-        return (T) this;
-    }
-
-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param script
-     *            The script to use
-     * @param params
-     *            Parameters that the script can use.
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public T addScriptField(String name, String script, Map<String, Object> params) {
-        sourceBuilder().scriptField(name, script, params);
-        return (T) this;
-    }
-
-    /**
-     * Adds a script based field to load and return. The field does not have to
-     * be stored, but its recommended to use non analyzed or numeric fields.
-     *
-     * @param name
-     *            The name that will represent this value in the return hit
-     * @param lang
-     *            The language of the script
-     * @param script
-     *            The script to use
-     * @param params
-     *            Parameters that the script can use (can be <tt>null</tt>).
-     * @deprecated Use {@link #addScriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public T addScriptField(String name, String lang, String script, Map<String, Object> params) {
-        sourceBuilder().scriptField(name, lang, script, params);
-        return (T) this;
-    }

     /**
      * Adds a sort against the given field name and the sort ordering.
      *
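
The three removed overloads collapse into the single `Script`-based entry point: name, language, and params now all travel inside one parameter object. A toy model of that overload collapse (ToyScript and ToyInnerHitBuilder are illustrative stand-ins, not the real classes):

```java
import java.util.Collections;
import java.util.Map;

final class ToyScript {
    final String source;
    final String lang;
    final Map<String, Object> params;

    ToyScript(String source, String lang, Map<String, Object> params) {
        this.source = source;
        this.lang = lang;
        this.params = params;
    }
}

class ToyInnerHitBuilder {
    // the single surviving entry point; lang and params ride inside the script object
    ToyInnerHitBuilder addScriptField(String name, ToyScript script) {
        System.out.println("script field " + name + " in " + script.lang + ": " + script.source);
        return this;
    }

    public static void main(String[] args) {
        new ToyInnerHitBuilder().addScriptField("price_with_tax",
                new ToyScript("doc['price'].value * factor", "groovy",
                        Collections.<String, Object>singletonMap("factor", 1.19)));
    }
}
```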


@@ -26,7 +26,6 @@ import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -57,7 +56,6 @@ public class NestedInnerQueryParseSupport {
     protected BitDocIdSetFilter parentFilter;
     protected BitDocIdSetFilter childFilter;

-    protected DocumentMapper childDocumentMapper;
     protected ObjectMapper nestedObjectMapper;
     private ObjectMapper parentObjectMapper;
@@ -157,12 +155,7 @@ public class NestedInnerQueryParseSupport {
     public void setPath(String path) {
         this.path = path;
-        MapperService.SmartNameObjectMapper smart = parseContext.smartObjectMapper(path);
-        if (smart == null) {
-            throw new QueryParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]");
-        }
-        childDocumentMapper = smart.docMapper();
-        nestedObjectMapper = smart.mapper();
+        nestedObjectMapper = parseContext.getObjectMapper(path);
         if (nestedObjectMapper == null) {
             throw new QueryParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]");
         }


@@ -104,7 +104,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
indexSearcher.search(childQuery, collector);
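
The same one-line substitution recurs in the ChildrenQuery, ParentConstantScoreQuery and ParentQuery hunks below: the throwaway per-request IndexSearcher now copies the scoring Similarity via Lucene's needsScores-aware accessor. A condensed sketch of the shared pattern (names as in the hunks):

```java
// Pattern shared by the four child/parent queries in this commit.
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
// getSimilarity(true) requests the Similarity used when scores are needed.
indexSearcher.setSimilarity(searcher.getSimilarity(true));
// No caching for this short-lived, per-request searcher.
indexSearcher.setQueryCache(null);
```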


@@ -153,7 +153,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
return new BooleanQuery().createWeight(searcher, needsScores);
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
boolean abort = true;


@@ -93,7 +93,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);


@@ -130,7 +130,7 @@ public class ParentQuery extends IndexCacheableQuery {
try {
collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {


@@ -22,7 +22,7 @@ package org.elasticsearch.index.shard;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.*;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.ThreadInterruptedException;
@@ -88,7 +88,7 @@ import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.Store.MetadataSnapshot;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.index.suggest.stats.ShardSuggestService;
import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
import org.elasticsearch.index.suggest.stats.SuggestStats;
import org.elasticsearch.index.termvectors.ShardTermVectorsService;
import org.elasticsearch.index.translog.Translog;
@@ -142,7 +142,7 @@ public class IndexShard extends AbstractIndexShardComponent {
private final ShardTermVectorsService termVectorsService;
private final IndexFieldDataService indexFieldDataService;
private final IndexService indexService;
private final ShardSuggestService shardSuggestService;
private final ShardSuggestMetric shardSuggestMetric = new ShardSuggestMetric();
private final ShardBitsetFilterCache shardBitsetFilterCache;
private final DiscoveryNode localNode;
@@ -192,9 +192,9 @@ public class IndexShard extends AbstractIndexShardComponent {
@Inject
public IndexShard(ShardId shardId, IndexSettingsService indexSettingsService, IndicesLifecycle indicesLifecycle, Store store,
ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, ShardIndexingService indexingService, ShardGetService getService, ShardSearchService searchService, ShardIndexWarmerService shardWarmerService,
ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, ShardIndexingService indexingService, ShardSearchService searchService, ShardIndexWarmerService shardWarmerService,
ShardFilterCache shardFilterCache, ShardFieldData shardFieldData, PercolatorQueriesRegistry percolatorQueriesRegistry, ShardPercolateService shardPercolateService, CodecService codecService,
ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService, ShardSuggestService shardSuggestService,
ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService,
ShardQueryCache shardQueryCache, ShardBitsetFilterCache shardBitsetFilterCache,
@Nullable IndicesWarmer warmer, SnapshotDeletionPolicy deletionPolicy, SimilarityService similarityService, EngineFactory factory,
ClusterService clusterService, NodeEnvironment nodeEnv, ShardPath path, BigArrays bigArrays) {
@@ -216,7 +216,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.indexCache = indexCache;
this.indexAliasesService = indexAliasesService;
this.indexingService = indexingService;
this.getService = getService.setIndexShard(this);
this.getService = new ShardGetService(this, mapperService);
this.termVectorsService = termVectorsService.setIndexShard(this);
this.searchService = searchService;
this.shardWarmerService = shardWarmerService;
@@ -227,7 +227,6 @@ public class IndexShard extends AbstractIndexShardComponent {
this.shardPercolateService = shardPercolateService;
this.indexFieldDataService = indexFieldDataService;
this.indexService = indexService;
this.shardSuggestService = shardSuggestService;
this.shardBitsetFilterCache = shardBitsetFilterCache;
assert clusterService.localNode() != null : "Local node is null lifecycle state is: " + clusterService.lifecycleState();
this.localNode = clusterService.localNode();
@@ -273,8 +272,8 @@ public class IndexShard extends AbstractIndexShardComponent {
return termVectorsService;
}
public ShardSuggestService shardSuggestService() {
public ShardSuggestMetric getSuggestMetric() {
return shardSuggestService;
return shardSuggestMetric;
}
public ShardBitsetFilterCache shardBitsetFilterCache() {
@@ -646,7 +645,7 @@ public class IndexShard extends AbstractIndexShardComponent {
}
public SuggestStats suggestStats() {
return shardSuggestService.stats();
return shardSuggestMetric.stats();
}
public CompletionStats completionStats(String... fields) {


@@ -30,7 +30,6 @@ import org.elasticsearch.index.engine.InternalEngineFactory;
import org.elasticsearch.index.fielddata.ShardFieldData;
import org.elasticsearch.index.gateway.IndexShardGateway;
import org.elasticsearch.index.gateway.IndexShardGatewayService;
import org.elasticsearch.index.get.ShardGetService;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
@@ -38,7 +37,6 @@ import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService;
import org.elasticsearch.index.search.stats.ShardSearchService;
import org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService;
import org.elasticsearch.index.suggest.stats.ShardSuggestService;
import org.elasticsearch.index.termvectors.ShardTermVectorsService;
import org.elasticsearch.index.translog.TranslogService;
import org.elasticsearch.index.warmer.ShardIndexWarmerService;
@@ -92,7 +90,6 @@ public class IndexShardModule extends AbstractModule {
bind(ShardSlowLogIndexingService.class).asEagerSingleton();
bind(ShardSearchService.class).asEagerSingleton();
bind(ShardSlowLogSearchService.class).asEagerSingleton();
bind(ShardGetService.class).asEagerSingleton();
bind(ShardFilterCache.class).toInstance(shardFilterCache);
bind(ShardQueryCache.class).asEagerSingleton();
bind(ShardBitsetFilterCache.class).asEagerSingleton();
@@ -103,7 +100,6 @@ public class IndexShardModule extends AbstractModule {
bind(ShardPercolateService.class).asEagerSingleton();
bind(ShardTermVectorsService.class).asEagerSingleton();
bind(IndexShardSnapshotAndRestoreService.class).asEagerSingleton();
bind(ShardSuggestService.class).asEagerSingleton();
}


@@ -37,7 +37,6 @@ import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.ShardFieldData;
import org.elasticsearch.index.get.ShardGetService;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats;
@@ -48,7 +47,6 @@ import org.elasticsearch.index.search.stats.ShardSearchService;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.suggest.stats.ShardSuggestService;
import org.elasticsearch.index.termvectors.ShardTermVectorsService;
import org.elasticsearch.index.warmer.ShardIndexWarmerService;
import org.elasticsearch.indices.IndicesLifecycle;
@@ -71,21 +69,21 @@ public final class ShadowIndexShard extends IndexShard {
ThreadPool threadPool, MapperService mapperService,
IndexQueryParserService queryParserService, IndexCache indexCache,
IndexAliasesService indexAliasesService, ShardIndexingService indexingService,
ShardGetService getService, ShardSearchService searchService,
ShardSearchService searchService,
ShardIndexWarmerService shardWarmerService, ShardFilterCache shardFilterCache,
ShardFieldData shardFieldData, PercolatorQueriesRegistry percolatorQueriesRegistry,
ShardPercolateService shardPercolateService, CodecService codecService,
ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService,
IndexService indexService, ShardSuggestService shardSuggestService, ShardQueryCache shardQueryCache,
IndexService indexService, ShardQueryCache shardQueryCache,
ShardBitsetFilterCache shardBitsetFilterCache, @Nullable IndicesWarmer warmer,
SnapshotDeletionPolicy deletionPolicy, SimilarityService similarityService,
EngineFactory factory, ClusterService clusterService,
NodeEnvironment nodeEnv, ShardPath path, BigArrays bigArrays) throws IOException {
super(shardId, indexSettingsService, indicesLifecycle, store,
threadPool, mapperService, queryParserService, indexCache, indexAliasesService,
indexingService, getService, searchService, shardWarmerService, shardFilterCache,
indexingService, searchService, shardWarmerService, shardFilterCache,
shardFieldData, percolatorQueriesRegistry, shardPercolateService, codecService,
termVectorsService, indexFieldDataService, indexService, shardSuggestService,
termVectorsService, indexFieldDataService, indexService,
shardQueryCache, shardBitsetFilterCache, warmer, deletionPolicy, similarityService,
factory, clusterService, nodeEnv, path, bigArrays);
}


@@ -32,16 +32,10 @@ import java.util.concurrent.TimeUnit;
/**
*
*/
public class ShardSuggestService extends AbstractIndexShardComponent {
public final class ShardSuggestMetric {
private final MeanMetric suggestMetric = new MeanMetric();
private final CounterMetric currentMetric = new CounterMetric();
@Inject
public ShardSuggestService(ShardId shardId, @IndexSettings Settings indexSettings) {
super(shardId, indexSettings);
}
/**
* Called before suggest
*/
@@ -64,5 +58,4 @@ public class ShardSuggestService extends AbstractIndexShardComponent {
public SuggestStats stats() {
return new SuggestStats(suggestMetric.count(), TimeUnit.NANOSECONDS.toMillis(suggestMetric.sum()), currentMetric.count());
}
}
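
A sketch of how a caller would bracket a suggest phase with the new metric, via the IndexShard.getSuggestMetric() accessor added above. The preSuggest()/postSuggest(long) names are assumptions inferred from the MeanMetric/CounterMetric fields and the "Called before suggest" javadoc; the elided middle of the hunk is not shown here:

```java
// Hypothetical call site; hook names and signatures are assumed.
final long startTime = System.nanoTime();
indexShard.getSuggestMetric().preSuggest();
try {
    // ... execute the suggest phase ...
} finally {
    indexShard.getSuggestMetric().postSuggest(System.nanoTime() - startTime);
}
```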


@@ -80,16 +80,6 @@ public class NodeService extends AbstractComponent {
this.httpServer = httpServer;
}
@Deprecated
public void putNodeAttribute(String key, String value) {
putAttribute(key, value);
}
@Deprecated
public void removeNodeAttribute(String key) {
removeAttribute(key);
}
public synchronized void putAttribute(String key, String value) {
serviceAttributes = new MapBuilder<>(serviceAttributes).put(key, value).immutableMap();
}


@@ -44,10 +44,10 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
@@ -65,7 +65,11 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.*;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
@@ -73,7 +77,11 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
/**
@@ -662,7 +670,7 @@ public class PercolateContext extends SearchContext {
}
@Override
public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
public ObjectMapper getObjectMapper(String name) {
throw new UnsupportedOperationException();
}


@@ -134,11 +134,6 @@ public abstract class RestRequest extends ContextAndHeaderHolder implements ToXC
return Booleans.parseBoolean(param(key), defaultValue);
}
@Override
@Deprecated
public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
return paramAsBoolean(key, defaultValue);
}
public TimeValue paramAsTime(String key, TimeValue defaultValue) {
return parseTimeValue(param(key), defaultValue, key);
}
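
Callers that relied on the removed boxed-optional variant switch to the primitive overload kept at the top of this hunk; a one-line sketch with an illustrative parameter key:

```java
// paramAsBoolean(String, boolean) remains; "pretty" is just an example key.
boolean pretty = request.paramAsBoolean("pretty", false);
```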


@@ -61,21 +61,14 @@ public class Script implements ToXContent, Streamable {
* The inline script to execute.
*/
public Script(String script) {
if (script == null) {
this(script, null);
throw new IllegalArgumentException("The parameter script (String) must not be null in Script.");
}
this.script = script;
}
/**
* For sub-classes to use to override the default language
*/
protected Script(String script, String lang) {
if (script == null) {
this(script, ScriptType.INLINE, lang, null);
throw new IllegalArgumentException("The parameter script (String) must not be null in Script.");
}
this.script = script;
this.lang = lang;
}
/**
@@ -93,7 +86,7 @@ public class Script implements ToXContent, Streamable {
* @param params
* The map of parameters the script will be executed with.
*/
public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map<String, Object> params) {
public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map<String, ? extends Object> params) {
if (script == null) {
throw new IllegalArgumentException("The parameter script (String) must not be null in Script.");
}
@@ -103,7 +96,7 @@ public class Script implements ToXContent, Streamable {
this.script = script;
this.type = type;
this.lang = lang;
this.params = params;
this.params = (Map<String, Object>)params;
}
/**
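
After this chaining, the convenience constructors are pure shorthand for the canonical four-argument form, so the two lines below are equivalent (assuming ScriptType comes from ScriptService, as elsewhere in this commit):

```java
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;

// Shorthand: inline script, default language, no parameters...
Script terse = new Script("doc['num'].value * 2");
// ...which the chained constructors expand to exactly this.
Script explicit = new Script("doc['num'].value * 2", ScriptType.INLINE, null, null);
```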


@@ -64,6 +64,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucke
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelModule;
import org.elasticsearch.search.aggregations.pipeline.seriesarithmetic.SeriesArithmeticParser;
import java.util.List;
@@ -114,6 +115,7 @@ public class AggregationModule extends AbstractModule implements SpawnModules{
pipelineAggParsers.add(AvgBucketParser.class);
pipelineAggParsers.add(SumBucketParser.class);
pipelineAggParsers.add(MovAvgParser.class);
pipelineAggParsers.add(SeriesArithmeticParser.class);
}
/**


@@ -69,6 +69,7 @@ import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipel
import org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivative;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.TransportMovAvgModelModule;
import org.elasticsearch.search.aggregations.pipeline.seriesarithmetic.SeriesArithmeticPipelineAggregator;
/**
* A module that registers all the transport streams for the addAggregation
@@ -127,6 +128,7 @@ public class TransportAggregationModule extends AbstractModule implements SpawnM
AvgBucketPipelineAggregator.registerStreams();
SumBucketPipelineAggregator.registerStreams();
MovAvgPipelineAggregator.registerStreams();
SeriesArithmeticPipelineAggregator.registerStreams();
}
@Override


@@ -80,85 +80,6 @@ public abstract class ValuesSourceAggregationBuilder<B extends ValuesSourceAggre
return (B) this;
}
/**
* Sets the script which generates the values. If the script is configured
* along with the field (as in {@link #field(String)}), then this script
* will be treated as a {@code value script}. A <i>value script</i> will be
* applied on the values that are extracted from the field data (you can
* refer to that value in the script using the {@code _value} reserved
* variable). If only the script is configured (and the no field is
* configured next to it), then the script will be responsible to generate
* the values that will be aggregated.
*
* @param script
* The configured script.
* @return This builder (fluent interface support)
* @deprecated Use {@link #script(Script)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
public B script(String script) {
this.scriptString = script;
return (B) this;
}
/**
* Sets the language of the script (if one is defined).
* <p/>
* Also see {@link #script(String)}.
*
* @param lang
* The language of the script.
* @return This builder (fluent interface support)
* @deprecated Use {@link #script(Script)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
public B lang(String lang) {
this.lang = lang;
return (B) this;
}
/**
* Sets the value of a parameter that is used in the script (if one is
* configured).
*
* @param name
* The name of the parameter.
* @param value
* The value of the parameter.
* @return This builder (fluent interface support)
* @deprecated Use {@link #script(Script)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
public B param(String name, Object value) {
if (params == null) {
params = Maps.newHashMap();
}
params.put(name, value);
return (B) this;
}
/**
* Sets the values of a parameters that are used in the script (if one is
* configured).
*
* @param params
* The the parameters.
* @return This builder (fluent interface support)
* @deprecated Use {@link #script(Script)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
public B params(Map<String, Object> params) {
if (this.params == null) {
this.params = Maps.newHashMap();
}
this.params.putAll(params);
return (B) this;
}
/**
* Configure the value to use when documents miss a value.
*/
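
Everything the removed script/lang/param setters did now funnels through the single script(Script) setter that remains on the builder. A hedged sketch against a typical subclass (sum is assumed to inherit the setter; the field name, groovy source and parameter are illustrative). As the removed javadoc explains, when a field is configured alongside the script, `_value` exposes the extracted field value:

```java
import java.util.Collections;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.sum.SumBuilder;

// Value script: _value is the per-document value of the "price" field.
SumBuilder taxedTotal = AggregationBuilders.sum("taxed_total")
        .field("price")
        .script(new Script("_value * rate", ScriptType.INLINE, "groovy",
                Collections.<String, Object>singletonMap("rate", 1.2)));
```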


@@ -156,11 +156,7 @@ public class NestedAggregator extends SingleBucketAggregator {
if (collectsFromSingleBucket == false) {
return asMultiBucketAggregator(this, context, parent);
}
MapperService.SmartNameObjectMapper mapper = context.searchContext().smartNameObjectMapper(path);
ObjectMapper objectMapper = context.searchContext().getObjectMapper(path);
if (mapper == null) {
return new Unmapped(name, context, parent, pipelineAggregators, metaData);
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
return new Unmapped(name, context, parent, pipelineAggregators, metaData);
}


@@ -145,11 +145,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
final ObjectMapper objectMapper;
if (path != null) {
MapperService.SmartNameObjectMapper mapper = context.searchContext().smartNameObjectMapper(path);
objectMapper = context.searchContext().getObjectMapper(path);
if (mapper == null) {
return new Unmapped(name, context, parent, pipelineAggregators, metaData);
}
objectMapper = mapper.mapper();
if (objectMapper == null) {
return new Unmapped(name, context, parent, pipelineAggregators, metaData);
}


@@ -189,79 +189,17 @@ public class ScriptHeuristic extends SignificanceHeuristic {
public static class ScriptHeuristicBuilder implements SignificanceHeuristicBuilder {
private Script script = null;
private String scriptString = null;
private ScriptType type = null;
private String lang = null;
private Map<String, Object> params = null;
public ScriptHeuristicBuilder setScript(Script script) {
this.script = script;
return this;
}
/**
* @deprecated use {@link #setScript(Script)}
*/
@Deprecated
public ScriptHeuristicBuilder setScript(String script) {
if (script != null) {
this.scriptString = script;
this.type = ScriptType.INLINE;
}
return this;
}
/**
* @deprecated use {@link #setScript(Script)}
*/
@Deprecated
public ScriptHeuristicBuilder setScriptFile(String script) {
if (script != null) {
this.scriptString = script;
this.type = ScriptType.FILE;
}
return this;
}
/**
* @deprecated use {@link #setScript(Script)}
*/
@Deprecated
public ScriptHeuristicBuilder setLang(String lang) {
this.lang = lang;
return this;
}
/**
* @deprecated use {@link #setScript(Script)}
*/
@Deprecated
public ScriptHeuristicBuilder setParams(Map<String, Object> params) {
this.params = params;
return this;
}
/**
* @deprecated use {@link #setScript(Script)}
*/
@Deprecated
public ScriptHeuristicBuilder setScriptId(String scriptId) {
if (scriptId != null) {
this.scriptString = scriptId;
this.type = ScriptType.INDEXED;
}
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject(STREAM.getName());
builder.field(ScriptField.SCRIPT.getPreferredName());
if (script == null) {
new Script(scriptString, type, lang, params).toXContent(builder, builderParams);
} else {
script.toXContent(builder, builderParams);
}
builder.endObject();
return builder;
}
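
A sketch of the slimmed-down builder in use on a significant_terms aggregation; the field, the scoring formula and the significanceHeuristic(...) hook on the enclosing builder are illustrative assumptions:

```java
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;

// Custom significance score; _subset_freq and _superset_freq are variables
// the heuristic binds for the script.
AggregationBuilders.significantTerms("sig_tags")
        .field("tags")
        .significanceHeuristic(new ScriptHeuristic.ScriptHeuristicBuilder()
                .setScript(new Script("_subset_freq / (_superset_freq + 1)")));
```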


@@ -37,34 +37,6 @@ public class ScriptedMetricBuilder extends MetricsAggregationBuilder {
private Script combineScript = null;
private Script reduceScript = null;
private Map<String, Object> params = null;
@Deprecated
private Map<String, Object> reduceParams = null;
@Deprecated
private String initScriptString = null;
@Deprecated
private String mapScriptString = null;
@Deprecated
private String combineScriptString = null;
@Deprecated
private String reduceScriptString = null;
@Deprecated
private String initScriptFile = null;
@Deprecated
private String mapScriptFile = null;
@Deprecated
private String combineScriptFile = null;
@Deprecated
private String reduceScriptFile = null;
@Deprecated
private String initScriptId = null;
@Deprecated
private String mapScriptId = null;
@Deprecated
private String combineScriptId = null;
@Deprecated
private String reduceScriptId = null;
@Deprecated
private String lang = null;
/**
* Sole constructor.
@@ -114,162 +86,6 @@ public class ScriptedMetricBuilder extends MetricsAggregationBuilder {
return this;
}
/**
* Set parameters that will be available in the <tt>reduce</tt> phase.
*
* @deprecated Use {@link #reduceScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder reduceParams(Map<String, Object> reduceParams) {
this.reduceParams = reduceParams;
return this;
}
/**
* Set the <tt>init</tt> script.
*
* @deprecated Use {@link #initScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder initScript(String initScript) {
this.initScriptString = initScript;
return this;
}
/**
* Set the <tt>map</tt> script.
*
* @deprecated Use {@link #mapScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder mapScript(String mapScript) {
this.mapScriptString = mapScript;
return this;
}
/**
* Set the <tt>combine</tt> script.
*
* @deprecated Use {@link #combineScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder combineScript(String combineScript) {
this.combineScriptString = combineScript;
return this;
}
/**
* Set the <tt>reduce</tt> script.
*
* @deprecated Use {@link #reduceScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder reduceScript(String reduceScript) {
this.reduceScriptString = reduceScript;
return this;
}
/**
* Set the <tt>init</tt> script file.
*
* @deprecated Use {@link #initScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder initScriptFile(String initScriptFile) {
this.initScriptFile = initScriptFile;
return this;
}
/**
* Set the <tt>map</tt> script file.
*
* @deprecated Use {@link #mapScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder mapScriptFile(String mapScriptFile) {
this.mapScriptFile = mapScriptFile;
return this;
}
/**
* Set the <tt>combine</tt> script file.
*
* @deprecated Use {@link #combineScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder combineScriptFile(String combineScriptFile) {
this.combineScriptFile = combineScriptFile;
return this;
}
/**
* Set the <tt>reduce</tt> script file.
*
* @deprecated Use {@link #reduceScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder reduceScriptFile(String reduceScriptFile) {
this.reduceScriptFile = reduceScriptFile;
return this;
}
/**
* Set the indexed <tt>init</tt> script id.
*
* @deprecated Use {@link #initScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder initScriptId(String initScriptId) {
this.initScriptId = initScriptId;
return this;
}
/**
* Set the indexed <tt>map</tt> script id.
*
* @deprecated Use {@link #mapScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder mapScriptId(String mapScriptId) {
this.mapScriptId = mapScriptId;
return this;
}
/**
* Set the indexed <tt>combine</tt> script id.
*
* @deprecated Use {@link #combineScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder combineScriptId(String combineScriptId) {
this.combineScriptId = combineScriptId;
return this;
}
/**
* Set the indexed <tt>reduce</tt> script id.
*
* @deprecated Use {@link #reduceScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder reduceScriptId(String reduceScriptId) {
this.reduceScriptId = reduceScriptId;
return this;
}
/**
* Set the script language.
*
* @deprecated Use {@link #initScript(Script)}, {@link #mapScript(Script)},
* {@link #combineScript(Script)}, and
* {@link #reduceScript(Script)} instead.
*/
@Deprecated
public ScriptedMetricBuilder lang(String lang) {
this.lang = lang;
return this;
}
@Override
protected void internalXContent(XContentBuilder builder, Params builderParams) throws IOException {
@@ -288,68 +104,10 @@ public class ScriptedMetricBuilder extends MetricsAggregationBuilder {
if (reduceScript != null) {
builder.field(ScriptedMetricParser.REDUCE_SCRIPT_FIELD.getPreferredName(), reduceScript);
}
if (params != null) {
builder.field(ScriptedMetricParser.PARAMS_FIELD.getPreferredName());
builder.map(params);
}
if (reduceParams != null) {
builder.field(ScriptedMetricParser.REDUCE_PARAMS_FIELD.getPreferredName());
builder.map(reduceParams);
}
if (initScriptString != null) {
builder.field(ScriptedMetricParser.INIT_SCRIPT, initScriptString);
}
if (mapScriptString != null) {
builder.field(ScriptedMetricParser.MAP_SCRIPT, mapScriptString);
}
if (combineScriptString != null) {
builder.field(ScriptedMetricParser.COMBINE_SCRIPT, combineScriptString);
}
if (reduceScriptString != null) {
builder.field(ScriptedMetricParser.REDUCE_SCRIPT, reduceScriptString);
}
if (initScriptFile != null) {
builder.field(ScriptedMetricParser.INIT_SCRIPT + ScriptParameterParser.FILE_SUFFIX, initScriptFile);
}
if (mapScriptFile != null) {
builder.field(ScriptedMetricParser.MAP_SCRIPT + ScriptParameterParser.FILE_SUFFIX, mapScriptFile);
}
if (combineScriptFile != null) {
builder.field(ScriptedMetricParser.COMBINE_SCRIPT + ScriptParameterParser.FILE_SUFFIX, combineScriptFile);
}
if (reduceScriptFile != null) {
builder.field(ScriptedMetricParser.REDUCE_SCRIPT + ScriptParameterParser.FILE_SUFFIX, reduceScriptFile);
}
if (initScriptId != null) {
builder.field(ScriptedMetricParser.INIT_SCRIPT + ScriptParameterParser.INDEXED_SUFFIX, initScriptId);
}
if (mapScriptId != null) {
builder.field(ScriptedMetricParser.MAP_SCRIPT + ScriptParameterParser.INDEXED_SUFFIX, mapScriptId);
}
if (combineScriptId != null) {
builder.field(ScriptedMetricParser.COMBINE_SCRIPT + ScriptParameterParser.INDEXED_SUFFIX, combineScriptId);
}
if (reduceScriptId != null) {
builder.field(ScriptedMetricParser.REDUCE_SCRIPT + ScriptParameterParser.INDEXED_SUFFIX, reduceScriptId);
}
if (lang != null) {
builder.field(ScriptedMetricParser.LANG_FIELD.getPreferredName(), lang);
}
}
}
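
With the string/file/id variants gone, each phase of a scripted_metric takes a Script directly. A minimal sketch of the surviving API; the groovy bodies and the _agg/_aggs variables follow the usual scripted_metric conventions and are illustrative:

```java
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricBuilder;

// Sum a field in four phases: init/map per shard, combine per shard, reduce globally.
ScriptedMetricBuilder profit = AggregationBuilders.scriptedMetric("profit")
        .initScript(new Script("_agg.transactions = []"))
        .mapScript(new Script("_agg.transactions.add(doc['amount'].value)"))
        .combineScript(new Script("total = 0; for (t in _agg.transactions) { total += t }; return total"))
        .reduceScript(new Script("total = 0; for (a in _aggs) { total += a }; return total"));
```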


@@ -152,60 +152,6 @@ public class TopHitsBuilder extends AbstractAggregationBuilder {
return this;
}
/**
* Adds a script based field to load and return. The field does not have to
* be stored, but its recommended to use non analyzed or numeric fields.
*
* @param name
* The name that will represent this value in the return hit
* @param script
* The script to use
* @deprecated Use {@link #addScriptField(String, Script)} instead.
*/
@Deprecated
public TopHitsBuilder addScriptField(String name, String script) {
sourceBuilder().scriptField(name, script);
return this;
}
/**
* Adds a script based field to load and return. The field does not have to
* be stored, but its recommended to use non analyzed or numeric fields.
*
* @param name
* The name that will represent this value in the return hit
* @param script
* The script to use
* @param params
* Parameters that the script can use.
* @deprecated Use {@link #addScriptField(String, Script)} instead.
*/
@Deprecated
public TopHitsBuilder addScriptField(String name, String script, Map<String, Object> params) {
sourceBuilder().scriptField(name, script, params);
return this;
}
/**
* Adds a script based field to load and return. The field does not have to
* be stored, but its recommended to use non analyzed or numeric fields.
*
* @param name
* The name that will represent this value in the return hit
* @param lang
* The language of the script
* @param script
* The script to use
* @param params
* Parameters that the script can use (can be <tt>null</tt>).
* @deprecated Use {@link #addScriptField(String, Script)} instead.
*/
@Deprecated
public TopHitsBuilder addScriptField(String name, String lang, String script, Map<String, Object> params) {
sourceBuilder().scriptField(name, lang, script, params);
return this;
}
/**
* Adds a sort against the given field name and the sort ordering.
*


@@ -172,7 +172,8 @@ public class BucketHelpers {
value = ((InternalNumericMetricsAggregation.SingleValue) propertyValue).value();
} else {
throw new AggregationExecutionException(DerivativeParser.BUCKETS_PATH.getPreferredName()
+ " must reference either a number value or a single value numeric metric aggregation");
+ " must reference either a number value or a single value numeric metric aggregation, got: "
+ propertyValue.getClass().getCanonicalName());
}
// doc count never has missing values so gap policy doesn't apply here
boolean isDocCountProperty = aggPathAsList.size() == 1 && "_count".equals(aggPathAsList.get(0));


@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucke
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder;
import org.elasticsearch.search.aggregations.pipeline.seriesarithmetic.SeriesArithmeticBuilder;
public final class PipelineAggregatorBuilders {
@@ -54,4 +55,8 @@ public final class PipelineAggregatorBuilders {
public static final MovAvgBuilder movingAvg(String name) {
return new MovAvgBuilder(name);
}
public static final SeriesArithmeticBuilder seriesArithmetic(String name) {
return new SeriesArithmeticBuilder(name);
}
}


@@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.seriesarithmetic;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import java.io.IOException;
import java.util.Map;
public class SeriesArithmeticBuilder extends PipelineAggregatorBuilder<SeriesArithmeticBuilder> {
private String format;
private GapPolicy gapPolicy;
private Script script;
private Map<String, String> bucketsPathsMap;
public SeriesArithmeticBuilder(String name) {
super(name, SeriesArithmeticPipelineAggregator.TYPE.name());
}
public SeriesArithmeticBuilder script(Script script) {
this.script = script;
return this;
}
public SeriesArithmeticBuilder format(String format) {
this.format = format;
return this;
}
public SeriesArithmeticBuilder gapPolicy(GapPolicy gapPolicy) {
this.gapPolicy = gapPolicy;
return this;
}
/**
* Sets the paths to the buckets to use for this pipeline aggregator
*/
public SeriesArithmeticBuilder setBucketsPathsMap(Map<String, String> bucketsPathsMap) {
this.bucketsPathsMap = bucketsPathsMap;
return this;
}
@Override
protected XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException {
if (script != null) {
builder.field(ScriptField.SCRIPT.getPreferredName(), script);
}
if (format != null) {
builder.field(SeriesArithmeticParser.FORMAT.getPreferredName(), format);
}
if (gapPolicy != null) {
builder.field(SeriesArithmeticParser.GAP_POLICY.getPreferredName(), gapPolicy.getName());
}
if (bucketsPathsMap != null) {
builder.field(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName(), bucketsPathsMap);
}
return builder;
}
}
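
A sketch of wiring the new builder into a request via the seriesArithmetic(...) factory registered in PipelineAggregatorBuilders above; the sibling aggregation names, script body and format pattern are illustrative:

```java
import com.google.common.collect.ImmutableMap;

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
import org.elasticsearch.search.aggregations.pipeline.seriesarithmetic.SeriesArithmeticBuilder;

// Per-bucket ratio of two sibling metrics; "returned" and "sales" are assumed agg names.
SeriesArithmeticBuilder returnRate = PipelineAggregatorBuilders.seriesArithmetic("return_rate")
        .setBucketsPathsMap(ImmutableMap.of("returns", "returned", "total", "sales"))
        .script(new Script("returns / total"))
        .format("0.00%")
        .gapPolicy(GapPolicy.SKIP);
```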


@@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.seriesarithmetic;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SeriesArithmeticParser implements PipelineAggregator.Parser {
public static final ParseField FORMAT = new ParseField("format");
public static final ParseField GAP_POLICY = new ParseField("gap_policy");
public static final ParseField PARAMS_FIELD = new ParseField("params");
@Override
public String type() {
return SeriesArithmeticPipelineAggregator.TYPE.name();
}
@Override
public PipelineAggregatorFactory parse(String reducerName, XContentParser parser, SearchContext context) throws IOException {
XContentParser.Token token;
Script script = null;
String currentFieldName = null;
Map<String, String> bucketsPathsMap = null;
String format = null;
GapPolicy gapPolicy = GapPolicy.SKIP;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
bucketsPathsMap = new HashMap<>();
bucketsPathsMap.put("_value", parser.text());
} else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();
paths.add(path);
}
bucketsPathsMap = new HashMap<>();
for (int i = 0; i < paths.size(); i++) {
bucketsPathsMap.put("_value" + i, paths.get(i));
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if (BUCKETS_PATH.match(currentFieldName)) {
Map<String, Object> map = parser.map();
bucketsPathsMap = new HashMap<>();
for (Map.Entry<String, Object> entry : map.entrySet()) {
bucketsPathsMap.put(entry.getKey(), String.valueOf(entry.getValue()));
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + reducerName + "].",
parser.getTokenLocation());
}
}
if (bucketsPathsMap == null) {
throw new SearchParseException(context, "Missing required field [" + BUCKETS_PATH.getPreferredName()
+ "] for series_arithmetic aggregation [" + reducerName + "]", parser.getTokenLocation());
}
if (script == null) {
throw new SearchParseException(context, "Missing required field [" + ScriptField.SCRIPT.getPreferredName()
+ "] for series_arithmetic aggregation [" + reducerName + "]", parser.getTokenLocation());
}
ValueFormatter formatter = null;
if (format != null) {
formatter = ValueFormat.Patternable.Number.format(format).formatter();
}
return new SeriesArithmeticPipelineAggregator.Factory(reducerName, bucketsPathsMap, script, formatter, gapPolicy);
}
}
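
Reading the branches above: buckets_path may be a plain string (bound to the script variable _value), an array (bound to _value0, _value1, ...), or an object whose keys become the variable names. Hedged request-DSL sketches of the three shapes, using the string form of script that the VALUE_STRING branch accepts; aggregation names are illustrative:

```json
{"series_arithmetic": {"buckets_path": "sales", "script": "_value * 2"}}
{"series_arithmetic": {"buckets_path": ["returned", "sales"], "script": "_value0 / _value1"}}
{"series_arithmetic": {"buckets_path": {"returns": "returned", "total": "sales"}, "script": "returns / total"}}
```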


@@ -0,0 +1,179 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.seriesarithmetic;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;
public class SeriesArithmeticPipelineAggregator extends PipelineAggregator {
public final static Type TYPE = new Type("series_arithmetic");
public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
@Override
public SeriesArithmeticPipelineAggregator readResult(StreamInput in) throws IOException {
SeriesArithmeticPipelineAggregator result = new SeriesArithmeticPipelineAggregator();
result.readFrom(in);
return result;
}
};
public static void registerStreams() {
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
}
private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
@Override
public InternalAggregation apply(Aggregation input) {
return (InternalAggregation) input;
}
};
private ValueFormatter formatter;
private GapPolicy gapPolicy;
private Script script;
private Map<String, String> bucketsPathsMap;
public SeriesArithmeticPipelineAggregator() {
}
public SeriesArithmeticPipelineAggregator(String name, Map<String, String> bucketsPathsMap, Script script, @Nullable ValueFormatter formatter,
GapPolicy gapPolicy, Map<String, Object> metadata) {
super(name, bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]), metadata);
this.bucketsPathsMap = bucketsPathsMap;
this.script = script;
this.formatter = formatter;
this.gapPolicy = gapPolicy;
}
@Override
public Type type() {
return TYPE;
}
@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends Bucket> buckets = originalAgg.getBuckets();
CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS);
List newBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
Map<String, Object> vars = new HashMap<>();
if (script.getParams() != null) {
vars.putAll(script.getParams());
}
for (Map.Entry<String, String> entry : bucketsPathsMap.entrySet()) {
String varName = entry.getKey();
String bucketsPath = entry.getValue();
Double value = resolveBucketValue(originalAgg, bucket, bucketsPath, gapPolicy);
vars.put(varName, value);
}
ExecutableScript executableScript = reduceContext.scriptService().executable(compiledScript, vars);
Object returned = executableScript.run();
if (returned == null) {
newBuckets.add(bucket);
} else {
if (!(returned instanceof Number)) {
throw new AggregationExecutionException("series_arithmetic script for reducer [" + name() + "] must return a Number");
}
List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION));
aggs.add(new InternalSimpleValue(name(), ((Number) returned).doubleValue(), formatter, new ArrayList<PipelineAggregator>(),
metaData()));
InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs),
(InternalMultiBucketAggregation.InternalBucket) bucket);
newBuckets.add(newBucket);
}
}
return originalAgg.create(newBuckets);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
script.writeTo(out);
ValueFormatterStreams.writeOptional(formatter, out);
gapPolicy.writeTo(out);
out.writeGenericValue(bucketsPathsMap);
}
@SuppressWarnings("unchecked")
@Override
protected void doReadFrom(StreamInput in) throws IOException {
script = Script.readScript(in);
formatter = ValueFormatterStreams.readOptional(in);
gapPolicy = GapPolicy.readFrom(in);
bucketsPathsMap = (Map<String, String>) in.readGenericValue();
}
public static class Factory extends PipelineAggregatorFactory {
private Script script;
private final ValueFormatter formatter;
private GapPolicy gapPolicy;
private Map<String, String> bucketsPathsMap;
public Factory(String name, Map<String, String> bucketsPathsMap, Script script, @Nullable ValueFormatter formatter, GapPolicy gapPolicy) {
super(name, TYPE.name(), bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]));
this.bucketsPathsMap = bucketsPathsMap;
this.script = script;
this.formatter = formatter;
this.gapPolicy = gapPolicy;
}
@Override
protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
return new SeriesArithmeticPipelineAggregator(name, bucketsPathsMap, script, formatter, gapPolicy, metaData);
}
}
}
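For orientation, here is a minimal sketch of how the aggregator above can be wired up through its `Factory`. The aggregation name, the `sales` -> `monthly_sales` path mapping, and the script body are illustrative assumptions, not part of this commit:

```java
// Hypothetical wiring of the series_arithmetic pipeline aggregator (names are made up).
Map<String, String> bucketsPathsMap = new HashMap<>();
bucketsPathsMap.put("sales", "monthly_sales");   // script variable -> buckets_path
Script script = new Script("sales * 1.19");      // must return a Number per bucket
SeriesArithmeticPipelineAggregator.Factory factory = new SeriesArithmeticPipelineAggregator.Factory(
        "sales_with_vat", bucketsPathsMap, script, null /* formatter */, GapPolicy.SKIP);
// During reduce, createInternal(metaData) produces the aggregator that rewrites each bucket.
```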


@@ -605,54 +605,6 @@ public class SearchSourceBuilder extends ToXContentToBytes {
         return this;
     }
-
-    /**
-     * Adds a script field under the given name with the provided script.
-     *
-     * @param name
-     *            The name of the field
-     * @param script
-     *            The script
-     * @deprecated Use {@link #scriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchSourceBuilder scriptField(String name, String script) {
-        return scriptField(name, null, script, null);
-    }
-
-    /**
-     * Adds a script field.
-     *
-     * @param name
-     *            The name of the field
-     * @param script
-     *            The script to execute
-     * @param params
-     *            The script parameters
-     * @deprecated Use {@link #scriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchSourceBuilder scriptField(String name, String script, Map<String, Object> params) {
-        return scriptField(name, null, script, params);
-    }
-
-    /**
-     * Adds a script field.
-     *
-     * @param name
-     *            The name of the field
-     * @param lang
-     *            The language of the script
-     * @param script
-     *            The script to execute
-     * @param params
-     *            The script parameters (can be <tt>null</tt>)
-     * @deprecated Use {@link #scriptField(String, Script)} instead.
-     */
-    @Deprecated
-    public SearchSourceBuilder scriptField(String name, String lang, String script, Map<String, Object> params) {
-        return scriptField(name, new Script(script, ScriptType.INLINE, lang, params));
-    }
-
     /**
      * Sets the boost a specific index will receive when the query is executed
      * against it.
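The three string-based `scriptField` overloads removed here all funnel into the surviving `scriptField(String, Script)` variant, so the migration is mechanical. A sketch, with an illustrative field name, script body, and params:

```java
// Before (removed): source.scriptField("price_eur", "groovy", "doc['price'].value * rate", params);
Map<String, Object> params = new HashMap<>();
params.put("rate", 0.92);
SearchSourceBuilder source = new SearchSourceBuilder()
        .scriptField("price_eur",
                new Script("doc['price'].value * rate", ScriptService.ScriptType.INLINE, "groovy", params));
```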


@@ -44,7 +44,6 @@ import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
 import org.elasticsearch.index.fieldvisitor.JustUidFieldsVisitor;
 import org.elasticsearch.index.fieldvisitor.UidAndSourceFieldsVisitor;
 import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
@@ -145,7 +144,7 @@ public class FetchPhase implements SearchPhase {
             MappedFieldType fieldType = context.smartNameFieldType(fieldName);
             if (fieldType == null) {
                 // Only fail if we know it is a object field, missing paths / fields shouldn't fail.
-                if (context.smartNameObjectMapper(fieldName) != null) {
+                if (context.getObjectMapper(fieldName) != null) {
                     throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
                 }
             } else if (fieldType.stored()) {


@@ -154,19 +154,18 @@ public class InnerHitsParseElement implements SearchParseElement {
     }

     private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String nestedPath) throws Exception {
-        MapperService.SmartNameObjectMapper smartNameObjectMapper = searchContext.smartNameObjectMapper(nestedPath);
-        if (smartNameObjectMapper == null || !smartNameObjectMapper.hasMapper()) {
+        ObjectMapper objectMapper = searchContext.getObjectMapper(nestedPath);
+        if (objectMapper == null) {
             throw new IllegalArgumentException("path [" + nestedPath +"] doesn't exist");
         }
-        ObjectMapper childObjectMapper = smartNameObjectMapper.mapper();
-        if (!childObjectMapper.nested().isNested()) {
+        if (objectMapper.nested().isNested() == false) {
             throw new IllegalArgumentException("path [" + nestedPath +"] isn't nested");
         }
-        ObjectMapper parentObjectMapper = parseContext.nestedScope().nextLevel(childObjectMapper);
+        ObjectMapper parentObjectMapper = parseContext.nestedScope().nextLevel(objectMapper);
         ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser);
         parseContext.nestedScope().previousLevel();
-        return new InnerHitsContext.NestedInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parentObjectMapper, childObjectMapper);
+        return new InnerHitsContext.NestedInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parentObjectMapper, objectMapper);
     }

     private ParseResult parseSubSearchContext(SearchContext searchContext, QueryParseContext parseContext, XContentParser parser) throws Exception {


@@ -70,7 +70,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
         super(searcher.reader());
         in = searcher.searcher();
         this.searchContext = searchContext;
-        setSimilarity(searcher.searcher().getSimilarity());
+        setSimilarity(searcher.searcher().getSimilarity(true));
     }

     @Override


@@ -49,9 +49,9 @@ import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.shard.IndexShard;
@@ -723,8 +723,8 @@ public class DefaultSearchContext extends SearchContext {
     }

     @Override
-    public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
-        return mapperService().smartNameObjectMapper(name, request.types());
+    public ObjectMapper getObjectMapper(String name) {
+        return mapperService().getObjectMapper(name, request.types());
     }

     @Override


@@ -34,9 +34,9 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.shard.IndexShard;
@@ -542,8 +542,8 @@ public abstract class FilteredSearchContext extends SearchContext {
     }

     @Override
-    public MapperService.SmartNameObjectMapper smartNameObjectMapper(String name) {
-        return in.smartNameObjectMapper(name);
+    public ObjectMapper getObjectMapper(String name) {
+        return in.getObjectMapper(name);
     }

     @Override


@@ -37,6 +37,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -346,7 +347,7 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders {
      */
     public abstract MappedFieldType smartNameFieldTypeFromAnyType(String name);

-    public abstract MapperService.SmartNameObjectMapper smartNameObjectMapper(String name);
+    public abstract ObjectMapper getObjectMapper(String name);

     public abstract Counter timeEstimateCounter();


@@ -564,7 +564,7 @@ public class ThreadPool extends AbstractComponent {
         EstimatedTimeThread(String name, long interval) {
             super(name);
             this.interval = interval;
-            this.estimatedTimeInMillis = System.currentTimeMillis();
+            this.estimatedTimeInMillis = TimeValue.nsecToMSec(System.nanoTime());
             this.counter = new TimeCounter();
             setDaemon(true);
         }
@@ -576,7 +576,7 @@ public class ThreadPool extends AbstractComponent {
         @Override
         public void run() {
             while (running) {
-                estimatedTimeInMillis = System.currentTimeMillis();
+                estimatedTimeInMillis = TimeValue.nsecToMSec(System.nanoTime());
                 try {
                     Thread.sleep(interval);
                 } catch (InterruptedException e) {
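The estimated-time thread now reads `System.nanoTime()` through `TimeValue.nsecToMSec` instead of the wall clock. A sketch of the difference, assuming `nsecToMSec` is a plain nanosecond-to-millisecond division:

```java
// Wall clock: can jump backwards or forwards when NTP or an operator adjusts the system time.
long wallClockMillis = System.currentTimeMillis();
// Monotonic clock: only moves forward, so interval math on the cached value stays safe.
long monotonicMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime()); // assumed equivalent to TimeValue.nsecToMSec
```

The trade-off is that the cached value is no longer an epoch timestamp, presumably acceptable because callers only need relative time estimates.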


@@ -119,83 +119,4 @@ public class UpdateRequestTests extends ElasticsearchTestCase {
         assertThat(doc.get("field1").toString(), equalTo("value1"));
         assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
     }
-
-    /*
-     * TODO Remove in 2.0
-     */
-    @Test
-    public void testUpdateRequestOldAPI() throws Exception {
-        UpdateRequest request = new UpdateRequest("test", "type", "1");
-        // simple script
-        request.source(XContentFactory.jsonBuilder().startObject().field("script", "script1").endObject());
-        assertThat(request.scriptString(), equalTo("script1"));
-
-        // script with params
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .field("script", "script1")
-                .startObject("params").field("param1", "value1").endObject()
-                .endObject());
-        assertThat(request.scriptString(), notNullValue());
-        assertThat(request.scriptString(), equalTo("script1"));
-        assertThat(request.scriptParams().get("param1").toString(), equalTo("value1"));
-
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .startObject("params").field("param1", "value1").endObject()
-                .field("script", "script1")
-                .endObject());
-        assertThat(request.scriptString(), notNullValue());
-        assertThat(request.scriptString(), equalTo("script1"));
-        assertThat(request.scriptParams().get("param1").toString(), equalTo("value1"));
-
-        // script with params and upsert
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .startObject("params").field("param1", "value1").endObject()
-                .field("script", "script1")
-                .startObject("upsert").field("field1", "value1").startObject("compound").field("field2", "value2").endObject().endObject()
-                .endObject());
-        assertThat(request.scriptString(), notNullValue());
-        assertThat(request.scriptString(), equalTo("script1"));
-        assertThat(request.scriptParams().get("param1").toString(), equalTo("value1"));
-        Map<String, Object> upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true).v2();
-        assertThat(upsertDoc.get("field1").toString(), equalTo("value1"));
-        assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
-
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .startObject("upsert").field("field1", "value1").startObject("compound").field("field2", "value2").endObject().endObject()
-                .startObject("params").field("param1", "value1").endObject()
-                .field("script", "script1")
-                .endObject());
-        assertThat(request.scriptString(), notNullValue());
-        assertThat(request.scriptString(), equalTo("script1"));
-        assertThat(request.scriptParams().get("param1").toString(), equalTo("value1"));
-        upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true).v2();
-        assertThat(upsertDoc.get("field1").toString(), equalTo("value1"));
-        assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
-
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .startObject("params").field("param1", "value1").endObject()
-                .startObject("upsert").field("field1", "value1").startObject("compound").field("field2", "value2").endObject().endObject()
-                .field("script", "script1")
-                .endObject());
-        assertThat(request.scriptString(), notNullValue());
-        assertThat(request.scriptString(), equalTo("script1"));
-        assertThat(request.scriptParams().get("param1").toString(), equalTo("value1"));
-        upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true).v2();
-        assertThat(upsertDoc.get("field1").toString(), equalTo("value1"));
-        assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
-
-        // script with doc
-        request = new UpdateRequest("test", "type", "1");
-        request.source(XContentFactory.jsonBuilder().startObject()
-                .startObject("doc").field("field1", "value1").startObject("compound").field("field2", "value2").endObject().endObject()
-                .endObject());
-        Map<String, Object> doc = request.doc().sourceAsMap();
-        assertThat(doc.get("field1").toString(), equalTo("value1"));
-        assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
-    }
 }


@@ -114,21 +114,17 @@ public class ClusterHealthResponsesTests extends ElasticsearchTestCase {
             state = ShardRoutingState.STARTED;
         } else if (i > 3) {
             state = ShardRoutingState.RELOCATING;
-        } else if (i > 1) {
-            state = ShardRoutingState.INITIALIZING;
         } else {
-            state = ShardRoutingState.UNASSIGNED;
+            state = ShardRoutingState.INITIALIZING;
         }

         switch (state) {
-            case UNASSIGNED:
-                return new MutableShardRouting(index, shardId, null, primary, ShardRoutingState.UNASSIGNED, 1);
             case STARTED:
-                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), primary, ShardRoutingState.STARTED, 1);
+                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), null, null, primary, ShardRoutingState.STARTED, 1);
             case INITIALIZING:
-                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), primary, ShardRoutingState.INITIALIZING, 1);
+                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), null, null, primary, ShardRoutingState.INITIALIZING, 1);
             case RELOCATING:
-                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), "node_" + Integer.toString(node_id++), primary, ShardRoutingState.RELOCATING, 1);
+                return new MutableShardRouting(index, shardId, "node_" + Integer.toString(node_id++), "node_" + Integer.toString(node_id++), null, primary, ShardRoutingState.RELOCATING, 1);
             default:
                 throw new ElasticsearchException("Unknown state: " + state.name());
         }
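This and the following test files all move to a longer `MutableShardRouting` constructor. From the RELOCATING call site the two new arguments appear to be the relocating node id and a restore source, giving an inferred parameter order of (index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version); this is read off the diff itself, not confirmed elsewhere:

```java
// Inferred signature (assumption based on these call sites):
// MutableShardRouting(index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version)
MutableShardRouting started = new MutableShardRouting(
        "test", 0, "node_1", null, null, true, ShardRoutingState.STARTED, 1);
MutableShardRouting relocating = new MutableShardRouting(
        "test", 1, "node_1", "node_2", null, true, ShardRoutingState.RELOCATING, 1);
```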


@@ -222,7 +222,7 @@ public class ClusterStateDiffTests extends ElasticsearchIntegrationTest {
         int replicaCount = randomIntBetween(1, 10);
         for (int j = 0; j < replicaCount; j++) {
             indexShard.addShard(
-                    new MutableShardRouting(index, i, randomFrom(nodeIds), j == 0, ShardRoutingState.fromValue((byte) randomIntBetween(1, 4)), 1));
+                    new MutableShardRouting(index, i, randomFrom(nodeIds), null, null, j == 0, ShardRoutingState.fromValue((byte) randomIntBetween(2, 4)), 1));
         }
         builder.addIndexShard(indexShard.build());
     }
} }

View File

@@ -77,7 +77,7 @@ public abstract class CatAllocationTestBase extends ElasticsearchAllocationTestCase {
             ShardRoutingState state = ShardRoutingState.valueOf(matcher.group(4));
             String ip = matcher.group(5);
             nodes.add(ip);
-            MutableShardRouting routing = new MutableShardRouting(index, shard, ip, primary, state, 1);
+            MutableShardRouting routing = new MutableShardRouting(index, shard, ip, null, null, primary, state, 1);
             idx.add(routing);
             logger.debug("Add routing {}", routing);
         } else {


@@ -829,8 +829,8 @@ public class DiskThresholdDeciderTests extends ElasticsearchAllocationTestCase {
                 .build();

         // Two shards consuming each 80% of disk space while 70% is allowed, so shard 0 isn't allowed here
-        MutableShardRouting firstRouting = new MutableShardRouting("test", 0, "node1", true, ShardRoutingState.STARTED, 1);
-        MutableShardRouting secondRouting = new MutableShardRouting("test", 1, "node1", true, ShardRoutingState.STARTED, 1);
+        MutableShardRouting firstRouting = new MutableShardRouting("test", 0, "node1", null, null, true, ShardRoutingState.STARTED, 1);
+        MutableShardRouting secondRouting = new MutableShardRouting("test", 1, "node1", null, null, true, ShardRoutingState.STARTED, 1);
         RoutingNode firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting));
         RoutingTable.Builder builder = RoutingTable.builder().add(
                 IndexRoutingTable.builder("test")
@@ -849,8 +849,8 @@ public class DiskThresholdDeciderTests extends ElasticsearchAllocationTestCase {
         assertThat(decision.type(), equalTo(Decision.Type.NO));

         // Two shards consuming each 80% of disk space while 70% is allowed, but one is relocating, so shard 0 can stay
-        firstRouting = new MutableShardRouting("test", 0, "node1", true, ShardRoutingState.STARTED, 1);
-        secondRouting = new MutableShardRouting("test", 1, "node1", "node2", true, ShardRoutingState.RELOCATING, 1);
+        firstRouting = new MutableShardRouting("test", 0, "node1", null, null, true, ShardRoutingState.STARTED, 1);
+        secondRouting = new MutableShardRouting("test", 1, "node1", "node2", null, true, ShardRoutingState.RELOCATING, 1);
         firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting));
         builder = RoutingTable.builder().add(
                 IndexRoutingTable.builder("test")


@@ -106,7 +106,7 @@ public class IndexCacheableQueryTests extends ElasticsearchTestCase {
         IndexReader reader = writer.getReader();
         // IndexReader wrapping is disabled because of LUCENE-6500.
         // Add it back when we are on 5.3
-        assert Version.LATEST == Version.LUCENE_5_2_0;
+        assert Version.LATEST == Version.LUCENE_5_2_1;
         IndexSearcher searcher = newSearcher(reader, false);
         reader = searcher.getIndexReader(); // reader might be wrapped
         searcher.setQueryCache(cache);
@@ -125,7 +125,7 @@ public class IndexCacheableQueryTests extends ElasticsearchTestCase {
         IndexReader reader2 = writer.getReader();
         // IndexReader wrapping is disabled because of LUCENE-6500.
         // Add it back when we are on 5.3
-        assert Version.LATEST == Version.LUCENE_5_2_0;
+        assert Version.LATEST == Version.LUCENE_5_2_1;
         searcher = newSearcher(reader2, false);
         reader2 = searcher.getIndexReader(); // reader might be wrapped
         searcher.setQueryCache(cache);


@@ -61,28 +61,6 @@ public class SimpleLuceneTests extends ElasticsearchTestCase {
         }
     }

-    @Test
-    public void testAddDocAfterPrepareCommit() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-        Document document = new Document();
-        document.add(new TextField("_id", "1", Field.Store.YES));
-        indexWriter.addDocument(document);
-        DirectoryReader reader = DirectoryReader.open(indexWriter, true);
-        assertThat(reader.numDocs(), equalTo(1));
-
-        indexWriter.prepareCommit();
-        // Returns null b/c no changes.
-        assertThat(DirectoryReader.openIfChanged(reader), equalTo(null));
-
-        document = new Document();
-        document.add(new TextField("_id", "2", Field.Store.YES));
-        indexWriter.addDocument(document);
-        indexWriter.commit();
-        reader = DirectoryReader.openIfChanged(reader);
-        assertThat(reader.numDocs(), equalTo(2));
-    }
-
     @Test
     public void testSimpleNumericOps() throws Exception {
         Directory dir = new RAMDirectory();


@@ -155,97 +155,6 @@ public class BulkTests extends ElasticsearchIntegrationTest {
         assertThat(((Long) getResponse.getField("field").getValue()), equalTo(4l));
     }

-    @Test
-    public void testBulkUpdate_simpleOldScriptAPI() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
-        ensureGreen();
-
-        BulkResponse bulkResponse = client().prepareBulk()
-                .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("1").setSource("field", 1))
-                .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("2").setSource("field", 2).setCreate(true))
-                .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("3").setSource("field", 3))
-                .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("4").setSource("field", 4))
-                .add(client().prepareIndex().setIndex(indexOrAlias()).setType("type1").setId("5").setSource("field", 5)).execute()
-                .actionGet();
-
-        assertThat(bulkResponse.hasFailures(), equalTo(false));
-        assertThat(bulkResponse.getItems().length, equalTo(5));
-        for (BulkItemResponse bulkItemResponse : bulkResponse) {
-            assertThat(bulkItemResponse.getIndex(), equalTo("test"));
-        }
-
-        bulkResponse = client()
-                .prepareBulk()
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("1")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE))
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE).setRetryOnConflict(3))
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("3").setDoc(jsonBuilder().startObject().field("field1", "test").endObject()))
-                .execute().actionGet();
-
-        assertThat(bulkResponse.hasFailures(), equalTo(false));
-        assertThat(bulkResponse.getItems().length, equalTo(3));
-        for (BulkItemResponse bulkItemResponse : bulkResponse) {
-            assertThat(bulkItemResponse.getIndex(), equalTo("test"));
-        }
-
-        assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getId(), equalTo("1"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(2l));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getId(), equalTo("2"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2l));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getId(), equalTo("3"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(2l));
-
-        GetResponse getResponse = client().prepareGet().setIndex("test").setType("type1").setId("1").setFields("field").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getVersion(), equalTo(2l));
-        assertThat(((Long) getResponse.getField("field").getValue()), equalTo(2l));
-
-        getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").setFields("field").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getVersion(), equalTo(2l));
-        assertThat(((Long) getResponse.getField("field").getValue()), equalTo(3l));
-
-        getResponse = client().prepareGet().setIndex("test").setType("type1").setId("3").setFields("field1").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getVersion(), equalTo(2l));
-        assertThat(getResponse.getField("field1").getValue().toString(), equalTo("test"));
-
-        bulkResponse = client().prepareBulk()
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("6")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE)
-                        .setUpsert(jsonBuilder().startObject().field("field", 0).endObject()))
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("7")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE))
-                .add(client().prepareUpdate().setIndex(indexOrAlias()).setType("type1").setId("2")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE))
-                .execute().actionGet();
-
-        assertThat(bulkResponse.hasFailures(), equalTo(true));
-        assertThat(bulkResponse.getItems().length, equalTo(3));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getId(), equalTo("6"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[0].getResponse()).getVersion(), equalTo(1l));
-        assertThat(bulkResponse.getItems()[1].getResponse(), nullValue());
-        assertThat(bulkResponse.getItems()[1].getFailure().getIndex(), equalTo("test"));
-        assertThat(bulkResponse.getItems()[1].getFailure().getId(), equalTo("7"));
-        assertThat(bulkResponse.getItems()[1].getFailure().getMessage(), containsString("document missing"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getId(), equalTo("2"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getIndex(), equalTo("test"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[2].getResponse()).getVersion(), equalTo(3l));
-
-        getResponse = client().prepareGet().setIndex("test").setType("type1").setId("6").setFields("field").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getVersion(), equalTo(1l));
-        assertThat(((Long) getResponse.getField("field").getValue()), equalTo(0l));
-
-        getResponse = client().prepareGet().setIndex("test").setType("type1").setId("7").setFields("field").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(false));
-
-        getResponse = client().prepareGet().setIndex("test").setType("type1").setId("2").setFields("field").execute().actionGet();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getVersion(), equalTo(3l));
-        assertThat(((Long) getResponse.getField("field").getValue()), equalTo(4l));
-    }
-
     @Test
     public void testBulkVersioning() throws Exception {
         createIndex("test");
@@ -334,49 +243,6 @@ public class BulkTests extends ElasticsearchIntegrationTest {
         assertThat(bulkResponse.getItems()[2].getResponse(), nullValue());
     }

-    /*
-     * TODO Remove in 2.0
-     */
-    @Test
-    public void testBulkUpdate_malformedScriptsOldScriptAPI() throws Exception {
-        createIndex("test");
-        ensureGreen();
-
-        BulkResponse bulkResponse = client().prepareBulk()
-                .add(client().prepareIndex().setIndex("test").setType("type1").setId("1").setSource("field", 1))
-                .add(client().prepareIndex().setIndex("test").setType("type1").setId("2").setSource("field", 1))
-                .add(client().prepareIndex().setIndex("test").setType("type1").setId("3").setSource("field", 1)).execute().actionGet();
-
-        assertThat(bulkResponse.hasFailures(), equalTo(false));
-        assertThat(bulkResponse.getItems().length, equalTo(3));
-
-        bulkResponse = client()
-                .prepareBulk()
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("1")
-                        .setScript("ctx._source.field += a", ScriptService.ScriptType.INLINE).setFields("field"))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("2")
-                        .setScript("ctx._source.field += 1", ScriptService.ScriptType.INLINE).setFields("field"))
-                .add(client().prepareUpdate().setIndex("test").setType("type1").setId("3")
-                        .setScript("ctx._source.field += a", ScriptService.ScriptType.INLINE).setFields("field"))
-                .execute().actionGet();
-
-        assertThat(bulkResponse.hasFailures(), equalTo(true));
-        assertThat(bulkResponse.getItems().length, equalTo(3));
-        assertThat(bulkResponse.getItems()[0].getFailure().getId(), equalTo("1"));
-        assertThat(bulkResponse.getItems()[0].getFailure().getMessage(), containsString("failed to execute script"));
-        assertThat(bulkResponse.getItems()[0].getResponse(), nullValue());
-
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getId(), equalTo("2"));
-        assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getVersion(), equalTo(2l));
-        assertThat(((Integer) ((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue()), equalTo(2));
-        assertThat(bulkResponse.getItems()[1].getFailure(), nullValue());
-
-        assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3"));
-        assertThat(bulkResponse.getItems()[2].getFailure().getMessage(), containsString("failed to execute script"));
-        assertThat(bulkResponse.getItems()[2].getResponse(), nullValue());
-    }
-
     @Test
     public void testBulkUpdate_largerVolume() throws Exception {
         createIndex("test");
@@ -511,146 +377,6 @@ public class BulkTests extends ElasticsearchIntegrationTest {
         }
     }

-    /*
-     * TODO Remove in 2.0
-     */
-    @Test
-    public void testBulkUpdate_largerVolumeOldScriptAPI() throws Exception {
-        createIndex("test");
-        ensureGreen();
-
-        int numDocs = scaledRandomIntBetween(100, 2000);
-        if (numDocs % 2 == 1) {
-            numDocs++; // this test needs an even num of docs
-        }
-        logger.info("Bulk-Indexing {} docs", numDocs);
-        BulkRequestBuilder builder = client().prepareBulk();
-        for (int i = 0; i < numDocs; i++) {
-            builder.add(client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i))
-                    .setScript("ctx._source.counter += 1", ScriptService.ScriptType.INLINE).setFields("counter")
-                    .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())
-            );
-        }
-
-        BulkResponse response = builder.execute().actionGet();
-        assertThat(response.hasFailures(), equalTo(false));
-        assertThat(response.getItems().length, equalTo(numDocs));
-        for (int i = 0; i < numDocs; i++) {
-            assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
-            assertThat(response.getItems()[i].getVersion(), equalTo(1l));
-            assertThat(response.getItems()[i].getIndex(), equalTo("test"));
-            assertThat(response.getItems()[i].getType(), equalTo("type1"));
-            assertThat(response.getItems()[i].getOpType(), equalTo("update"));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i)));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(1l));
-            assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue()), equalTo(1));
-
-            for (int j = 0; j < 5; j++) {
-                GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).setFields("counter").execute().actionGet();
-                assertThat(getResponse.isExists(), equalTo(true));
-                assertThat(getResponse.getVersion(), equalTo(1l));
-                assertThat((Long) getResponse.getField("counter").getValue(), equalTo(1l));
-            }
-        }
-
-        builder = client().prepareBulk();
-        for (int i = 0; i < numDocs; i++) {
-            UpdateRequestBuilder updateBuilder = client().prepareUpdate()
-                    .setIndex("test").setType("type1").setId(Integer.toString(i)).setFields("counter");
-            if (i % 2 == 0) {
-                updateBuilder.setScript("ctx._source.counter += 1", ScriptService.ScriptType.INLINE);
-            } else {
-                updateBuilder.setDoc(jsonBuilder().startObject().field("counter", 2).endObject());
-            }
-            if (i % 3 == 0) {
-                updateBuilder.setRetryOnConflict(3);
-            }
-            builder.add(updateBuilder);
-        }
-
-        response = builder.execute().actionGet();
-        assertThat(response.hasFailures(), equalTo(false));
-        assertThat(response.getItems().length, equalTo(numDocs));
-        for (int i = 0; i < numDocs; i++) {
-            assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
-            assertThat(response.getItems()[i].getVersion(), equalTo(2l));
-            assertThat(response.getItems()[i].getIndex(), equalTo("test"));
-            assertThat(response.getItems()[i].getType(), equalTo("type1"));
-            assertThat(response.getItems()[i].getOpType(), equalTo("update"));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getId(), equalTo(Integer.toString(i)));
-            assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getVersion(), equalTo(2l));
-            assertThat(((Integer) ((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue()), equalTo(2));
-        }
-
-        builder = client().prepareBulk();
-        int maxDocs = numDocs / 2 + numDocs;
-        for (int i = (numDocs / 2); i < maxDocs; i++) {
-            builder.add(
-                    client().prepareUpdate()
-                            .setIndex("test").setType("type1").setId(Integer.toString(i)).setScript("ctx._source.counter += 1", ScriptService.ScriptType.INLINE)
-            );
-        }
-        response = builder.execute().actionGet();
-        assertThat(response.hasFailures(), equalTo(true));
-        assertThat(response.getItems().length, equalTo(numDocs));
-        for (int i = 0; i < numDocs; i++) {
-            int id = i + (numDocs / 2);
-            if (i >= (numDocs / 2)) {
-                assertThat(response.getItems()[i].getFailure().getId(), equalTo(Integer.toString(id)));
-                assertThat(response.getItems()[i].getFailure().getMessage(), containsString("document missing"));
-            } else {
-                assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(id)));
-                assertThat(response.getItems()[i].getVersion(), equalTo(3l));
-                assertThat(response.getItems()[i].getIndex(), equalTo("test"));
-                assertThat(response.getItems()[i].getType(), equalTo("type1"));
-                assertThat(response.getItems()[i].getOpType(), equalTo("update"));
-            }
-        }
-
-        builder = client().prepareBulk();
-        for (int i = 0; i < numDocs; i++) {
-            builder.add(
-                    client().prepareUpdate()
-                            .setIndex("test").setType("type1").setId(Integer.toString(i))
-                            .setScript("ctx.op = \"none\"", ScriptService.ScriptType.INLINE)
-            );
-        }
-        response = builder.execute().actionGet();
-        assertThat(response.buildFailureMessage(), response.hasFailures(), equalTo(false));
-        assertThat(response.getItems().length, equalTo(numDocs));
-        for (int i = 0; i < numDocs; i++) {
-            assertThat(response.getItems()[i].getItemId(), equalTo(i));
-            assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
-            assertThat(response.getItems()[i].getIndex(), equalTo("test"));
-            assertThat(response.getItems()[i].getType(), equalTo("type1"));
-            assertThat(response.getItems()[i].getOpType(), equalTo("update"));
-        }
-
-        builder = client().prepareBulk();
-        for (int i = 0; i < numDocs; i++) {
-            builder.add(
-                    client().prepareUpdate()
-                            .setIndex("test").setType("type1").setId(Integer.toString(i))
-                            .setScript("ctx.op = \"delete\"", ScriptService.ScriptType.INLINE)
-            );
-        }
-        response = builder.execute().actionGet();
-        assertThat(response.hasFailures(), equalTo(false));
-        assertThat(response.getItems().length, equalTo(numDocs));
-        for (int i = 0; i < numDocs; i++) {
-            assertThat(response.getItems()[i].getItemId(), equalTo(i));
-            assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i)));
-            assertThat(response.getItems()[i].getIndex(), equalTo("test"));
-            assertThat(response.getItems()[i].getType(), equalTo("type1"));
-            assertThat(response.getItems()[i].getOpType(), equalTo("update"));
-            for (int j = 0; j < 5; j++) {
-                GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).setFields("counter").execute().actionGet();
-                assertThat(getResponse.isExists(), equalTo(false));
-            }
-        }
-    }
-
     @Test
     public void testBulkIndexingWhileInitializing() throws Exception {


@@ -262,6 +262,9 @@ public class InternalEngineTests extends ElasticsearchTestCase {

     @Test
     public void testSegments() throws Exception {
+        IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
+        try (Store store = createStore();
+             Engine engine = createEngine(indexSettingsService, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), NoMergePolicy.INSTANCE)) {
         List<Segment> segments = engine.segments(false);
         assertThat(segments.isEmpty(), equalTo(true));
         assertThat(engine.segmentsStats().getCount(), equalTo(0l));
@@ -378,8 +381,12 @@ public class InternalEngineTests extends ElasticsearchTestCase {
         assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
         assertThat(segments.get(2).isCompound(), equalTo(true));
     }
+    }

     public void testVerboseSegments() throws Exception {
+        IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
+        try (Store store = createStore();
+             Engine engine = createEngine(indexSettingsService, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), NoMergePolicy.INSTANCE)) {
         List<Segment> segments = engine.segments(true);
         assertThat(segments.isEmpty(), equalTo(true));
@@ -403,6 +410,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
         assertThat(segments.get(0).ramTree, notNullValue());
         assertThat(segments.get(1).ramTree, notNullValue());
         assertThat(segments.get(2).ramTree, notNullValue());
+        }
     }
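These segment-counting tests now build their engines with `NoMergePolicy.INSTANCE`. A minimal plain-Lucene sketch of why: with merging disabled, the number of segments equals the number of flushes, so assertions on exact segment counts become deterministic.

```java
// Plain-Lucene illustration of the same idea (not Elasticsearch code).
IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer())
        .setMergePolicy(NoMergePolicy.INSTANCE); // background merges can no longer collapse segments
// Every commit after adding documents now yields exactly one more segment.
```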


@@ -208,18 +208,22 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
     }

     protected ShadowEngine createShadowEngine(IndexSettingsService indexSettingsService, Store store) {
-        return new ShadowEngine(config(indexSettingsService, store, null, new MergeSchedulerConfig(indexSettingsService.indexSettings())));
+        return new ShadowEngine(config(indexSettingsService, store, null, new MergeSchedulerConfig(indexSettingsService.indexSettings()), null));
     }

     protected InternalEngine createInternalEngine(IndexSettingsService indexSettingsService, Store store, Path translogPath) {
-        return new InternalEngine(config(indexSettingsService, store, translogPath, new MergeSchedulerConfig(indexSettingsService.indexSettings())), true);
+        return createInternalEngine(indexSettingsService, store, translogPath, newMergePolicy());
     }

-    public EngineConfig config(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig) {
+    protected InternalEngine createInternalEngine(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergePolicy mergePolicy) {
+        return new InternalEngine(config(indexSettingsService, store, translogPath, new MergeSchedulerConfig(indexSettingsService.indexSettings()), mergePolicy), true);
+    }
+
+    public EngineConfig config(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) {
         IndexWriterConfig iwc = newIndexWriterConfig();
         TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettingsService.getSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
         EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, EMPTY_SETTINGS, new ShardSlowLogIndexingService(shardId, EMPTY_SETTINGS, indexSettingsService)), indexSettingsService
-                , null, store, createSnapshotDeletionPolicy(), newMergePolicy(), mergeSchedulerConfig,
+                , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
                 iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(shardId.index()), new Engine.FailedEngineListener() {
             @Override
             public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) {
@@ -264,6 +268,9 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
     @Test
     public void testSegments() throws Exception {
+        IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
+        primaryEngine.close(); // recreate without merging
+        primaryEngine = createInternalEngine(indexSettingsService, store, createTempDir(), NoMergePolicy.INSTANCE);
         List<Segment> segments = primaryEngine.segments(false);
         assertThat(segments.isEmpty(), equalTo(true));
         assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l));
@@ -433,6 +440,9 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
     @Test
     public void testVerboseSegments() throws Exception {
+        IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
+        primaryEngine.close(); // recreate without merging
+        primaryEngine = createInternalEngine(indexSettingsService, store, createTempDir(), NoMergePolicy.INSTANCE);
         List<Segment> segments = primaryEngine.segments(true);
         assertThat(segments.isEmpty(), equalTo(true));


@@ -87,7 +87,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         }
         LeafReaderContext context = refreshReader();
         Map<FieldDataType, Type> typeMap = new HashMap<>();
-        typeMap.put(new FieldDataType("string", Settings.builder().put("format", "fst")), Type.Bytes);
         typeMap.put(new FieldDataType("string", Settings.builder().put("format", "paged_bytes")), Type.Bytes);
         typeMap.put(new FieldDataType("byte", Settings.builder().put("format", "array")), Type.Integer);
         typeMap.put(new FieldDataType("short", Settings.builder().put("format", "array")), Type.Integer);
@@ -325,7 +324,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         }
         LeafReaderContext context = refreshReader();
         Map<FieldDataType, Type> typeMap = new HashMap<>();
-        typeMap.put(new FieldDataType("string", Settings.builder().put("format", "fst")), Type.Bytes);
         typeMap.put(new FieldDataType("string", Settings.builder().put("format", "paged_bytes")), Type.Bytes);
         typeMap.put(new FieldDataType("string", Settings.builder().put("format", "doc_values")), Type.Bytes);
         // TODO add filters
@@ -384,7 +382,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         refreshReader();
         Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuelFieldDataTests.Type>();
-        typeMap.put(new FieldDataType("string", Settings.builder().put("format", "fst")), Type.Bytes);
         typeMap.put(new FieldDataType("string", Settings.builder().put("format", "paged_bytes")), Type.Bytes);
         typeMap.put(new FieldDataType("string", Settings.builder().put("format", "doc_values")), Type.Bytes);
@@ -437,7 +434,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
         Map<FieldDataType, Type> typeMap = new HashMap<>();
         final Distance precision = new Distance(1, randomFrom(DistanceUnit.values()));
         typeMap.put(new FieldDataType("geo_point", Settings.builder().put("format", "array")), Type.GeoPoint);
-        typeMap.put(new FieldDataType("geo_point", Settings.builder().put("format", "compressed").put("precision", precision)), Type.GeoPoint);
         typeMap.put(new FieldDataType("geo_point", Settings.builder().put("format", "doc_values")), Type.GeoPoint);

         ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<>(typeMap.entrySet());


@@ -60,7 +60,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
         }
         writer.forceMerge(1, true);
         LeafReaderContext context = refreshReader();
-        String[] formats = new String[] { "fst", "paged_bytes"};
+        String[] formats = new String[] { "paged_bytes"};

         for (String format : formats) {
             {
@@ -153,7 +153,7 @@ public class FilterFieldDataTest extends AbstractFieldDataTests {
         logger.debug(hundred + " " + ten + " " + five);
         writer.forceMerge(1, true);
         LeafReaderContext context = refreshReader();
-        String[] formats = new String[] { "fst", "paged_bytes"};
+        String[] formats = new String[] { "paged_bytes"};

         for (String format : formats) {
             {
                 ifdService.clear();


@ -28,7 +28,6 @@ import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.plain.*; import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperBuilders;
@ -101,10 +100,10 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
final IndexService indexService = createIndex("test"); final IndexService indexService = createIndex("test");
 final IndexFieldDataService ifdService = indexService.fieldData();
 final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
-final MappedFieldType stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(Settings.builder().put("format", "fst").build()).build(ctx).fieldType();
+final MappedFieldType stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(Settings.builder().put("format", "disabled").build()).build(ctx).fieldType();
 ifdService.clear();
 IndexFieldData<?> fd = ifdService.getForField(stringMapper);
-assertTrue(fd instanceof FSTBytesIndexFieldData);
+assertTrue(fd instanceof DisabledIndexFieldData);
 final Settings fdSettings = Settings.builder().put("format", "array").build();
 for (MappedFieldType mapper : Arrays.asList(
@@ -133,7 +132,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
 final IndexService indexService = createIndex("test");
 final IndexFieldDataService ifdService = indexService.fieldData();
 final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
-final MappedFieldType mapper1 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx).fieldType();
+final MappedFieldType mapper1 = MapperBuilders.stringField("s").tokenized(false).docValues(true).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx).fieldType();
 final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
 Document doc = new Document();
 doc.add(new StringField("s", "thisisastring", Store.NO));
@@ -150,18 +149,10 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
 // write new segment
 writer.addDocument(doc);
 final IndexReader reader2 = DirectoryReader.open(writer, true);
-final MappedFieldType mapper2 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "fst").build()).build(ctx).fieldType();
+final MappedFieldType mapper2 = MapperBuilders.stringField("s").tokenized(false).docValues(true).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "doc_values").build()).build(ctx).fieldType();
 ifdService.onMappingUpdate();
 ifd = ifdService.getForField(mapper2);
-assertThat(ifd, instanceOf(FSTBytesIndexFieldData.class));
-for (LeafReaderContext arc : reader2.leaves()) {
-    AtomicFieldData afd = ifd.load(arc);
-    if (oldSegments.contains(arc.reader())) {
-        assertThat(afd, instanceOf(PagedBytesAtomicFieldData.class));
-    } else {
-        assertThat(afd, instanceOf(FSTBytesAtomicFieldData.class));
-    }
-}
+assertThat(ifd, instanceOf(SortedSetDVOrdinalsIndexFieldData.class));
 reader1.close();
 reader2.close();
 writer.close();
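For context on this hunk (an editorial note, not part of the commit): the expectations change because the `fst` and `paged_bytes` fielddata formats are going away; a `format` of `disabled` now resolves to `DisabledIndexFieldData`, and a doc-values-backed string field resolves to `SortedSetDVOrdinalsIndexFieldData`. A minimal sketch of the lookup the updated test exercises, reusing `ctx` and `ifdService` from the test above:

```java
// Sketch only: assumes the BuilderContext 'ctx' and the
// IndexFieldDataService 'ifdService' already set up in the test.
MappedFieldType ft = MapperBuilders.stringField("s")
        .tokenized(false)
        .docValues(true)
        .fieldDataSettings(Settings.builder()
                .put(FieldDataType.FORMAT_KEY, "doc_values")
                .build())
        .build(ctx).fieldType();

IndexFieldData<?> ifd = ifdService.getForField(ft);
// With doc values enabled, the doc-values-backed implementation is expected.
assertThat(ifd, instanceOf(SortedSetDVOrdinalsIndexFieldData.class));
```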

org/elasticsearch/index/mapper/FieldTypeTestCase.java
@@ -0,0 +1,126 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;

import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.test.ElasticsearchTestCase;

/** Base test case for subclasses of MappedFieldType */
public abstract class FieldTypeTestCase extends ElasticsearchTestCase {

    /** Create a default constructed fieldtype */
    protected abstract MappedFieldType createDefaultFieldType();

    /** A dummy null value to use when modifying the null value property */
    protected Object dummyNullValue() {
        return "dummyvalue";
    }

    /** Returns the number of properties that can be modified for the fieldtype */
    protected int numProperties() {
        return 10;
    }

    /** Modifies a property, identified by propNum, on the given fieldtype */
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        switch (propNum) {
            case 0: ft.setNames(new MappedFieldType.Names("dummy")); break;
            case 1: ft.setBoost(1.1f); break;
            case 2: ft.setHasDocValues(!ft.hasDocValues()); break;
            case 3: ft.setIndexAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 4: ft.setSearchAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 5: ft.setSearchQuoteAnalyzer(Lucene.STANDARD_ANALYZER); break;
            case 6: ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY)); break;
            case 7: ft.setNormsLoading(MappedFieldType.Loading.LAZY); break;
            case 8: ft.setFieldDataType(new FieldDataType("foo", Settings.builder().put("loading", "eager").build())); break;
            case 9: ft.setNullValue(dummyNullValue()); break;
            default: fail("unknown fieldtype property number " + propNum);
        }
    }

    // TODO: remove this once toString is no longer final on FieldType...
    protected void assertEquals(int i, MappedFieldType ft1, MappedFieldType ft2) {
        assertEquals("prop " + i + "\nexpected: " + toString(ft1) + "; \nactual: " + toString(ft2), ft1, ft2);
    }

    protected String toString(MappedFieldType ft) {
        return "MappedFieldType{" +
            "names=" + ft.names() +
            ", boost=" + ft.boost() +
            ", docValues=" + ft.hasDocValues() +
            ", indexAnalyzer=" + ft.indexAnalyzer() +
            ", searchAnalyzer=" + ft.searchAnalyzer() +
            ", searchQuoteAnalyzer=" + ft.searchQuoteAnalyzer() +
            ", similarity=" + ft.similarity() +
            ", normsLoading=" + ft.normsLoading() +
            ", fieldDataType=" + ft.fieldDataType() +
            ", nullValue=" + ft.nullValue() +
            ", nullValueAsString='" + ft.nullValueAsString() + "'" +
            "} " + super.toString();
    }

    public void testClone() {
        MappedFieldType fieldType = createDefaultFieldType();
        MappedFieldType clone = fieldType.clone();
        assertNotSame(clone, fieldType);
        assertEquals(clone.getClass(), fieldType.getClass());
        assertEquals(clone, fieldType);
        assertEquals(clone, clone.clone()); // transitivity
        for (int i = 0; i < numProperties(); ++i) {
            fieldType = createDefaultFieldType();
            modifyProperty(fieldType, i);
            clone = fieldType.clone();
            assertNotSame(clone, fieldType);
            assertEquals(i, clone, fieldType);
        }
    }

    public void testEquals() {
        MappedFieldType ft1 = createDefaultFieldType();
        MappedFieldType ft2 = createDefaultFieldType();
        assertEquals(ft1, ft1); // reflexive
        assertEquals(ft1, ft2); // symmetric
        assertEquals(ft2, ft1);
        assertEquals(ft1.hashCode(), ft2.hashCode());
        for (int i = 0; i < numProperties(); ++i) {
            ft2 = createDefaultFieldType();
            modifyProperty(ft2, i);
            assertNotEquals(ft1, ft2);
            assertNotEquals(ft1.hashCode(), ft2.hashCode());
        }
    }

    public void testFreeze() {
        for (int i = 0; i < numProperties(); ++i) {
            MappedFieldType fieldType = createDefaultFieldType();
            fieldType.freeze();
            try {
                modifyProperty(fieldType, i);
                fail("expected already frozen exception for property " + i);
            } catch (IllegalStateException e) {
                assertTrue(e.getMessage().contains("already frozen"));
            }
        }
    }
}
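The hooks above define the contract for the per-field-type tests that follow: override `createDefaultFieldType()`, and, for field types with extra mutable properties, extend `numProperties()` and `modifyProperty()`, handling the local properties first and delegating the remaining indices to `super` with `propNum` shifted down. A minimal sketch of that shape, assuming a hypothetical `FooFieldMapper.FooFieldType` with one extra `bar` flag (both invented for illustration, not real Elasticsearch classes):

```java
// Hypothetical subclass illustrating the FieldTypeTestCase extension contract.
public class FooFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new FooFieldMapper.FooFieldType(); // placeholder type
    }

    @Override
    protected int numProperties() {
        return 1 + super.numProperties(); // one extra property: 'bar'
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        FooFieldMapper.FooFieldType fft = (FooFieldMapper.FooFieldType) ft;
        switch (propNum) {
            case 0: fft.setBar(!fft.bar()); break;          // local property first
            default: super.modifyProperty(ft, propNum - 1); // shared properties
        }
    }
}
```

BinaryFieldTypeTests and DateFieldTypeTests below follow exactly this shape.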

org/elasticsearch/index/mapper/MappedFieldTypeTests.java
@@ -0,0 +1,27 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;

public class MappedFieldTypeTests extends FieldTypeTestCase {

    @Override
    public MappedFieldType createDefaultFieldType() {
        return new MappedFieldType();
    }
}

org/elasticsearch/index/mapper/core/BinaryFieldTypeTests.java
@@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class BinaryFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new BinaryFieldMapper.BinaryFieldType();
    }

    @Override
    protected int numProperties() {
        return 1 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        BinaryFieldMapper.BinaryFieldType bft = (BinaryFieldMapper.BinaryFieldType) ft;
        switch (propNum) {
            case 0: bft.setTryUncompressing(!bft.tryUncompressing()); break;
            default: super.modifyProperty(ft, propNum - 1);
        }
    }
}

org/elasticsearch/index/fielddata/FSTPackedBytesStringFieldDataTests.java → org/elasticsearch/index/mapper/core/BooleanFieldTypeTests.java
@@ -16,18 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.index.fielddata;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
-/**
- */
-public class FSTPackedBytesStringFieldDataTests extends AbstractStringFieldDataTests {
+package org.elasticsearch.index.mapper.core;
+import org.elasticsearch.index.mapper.FieldTypeTestCase;
+import org.elasticsearch.index.mapper.MappedFieldType;
+public class BooleanFieldTypeTests extends FieldTypeTestCase {
+    @Override
+    protected MappedFieldType createDefaultFieldType() {
+        return new BooleanFieldMapper.BooleanFieldType();
+    }
     @Override
-    protected FieldDataType getFieldDataType() {
-        return new FieldDataType("string", Settings.builder().put("format", "fst").put(OrdinalsBuilder.FORCE_MULTI_ORDINALS, randomBoolean()));
+    protected Object dummyNullValue() {
+        return true;
     }
 }
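Worth noting on this conversion (editorial): the format-specific fielddata test disappears together with the `fst` format, and the file is repurposed as a field type test. The `dummyNullValue()` override is what keeps the shared property checks type-safe here: case 9 of `FieldTypeTestCase.modifyProperty()` calls `setNullValue(dummyNullValue())`, so a boolean field supplies a `Boolean` instead of the default `"dummyvalue"` string.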

org/elasticsearch/index/mapper/core/ByteFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class ByteFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new ByteFieldMapper.ByteFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (byte) 10;
    }
}

org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java
@@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class CompletionFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new CompletionFieldMapper.CompletionFieldType();
    }
}

org/elasticsearch/index/mapper/core/DateFieldTypeTests.java
@@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.util.Locale;
import java.util.concurrent.TimeUnit;

public class DateFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new DateFieldMapper.DateFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10;
    }

    @Override
    protected int numProperties() {
        return 2 + super.numProperties();
    }

    @Override
    protected void modifyProperty(MappedFieldType ft, int propNum) {
        DateFieldMapper.DateFieldType dft = (DateFieldMapper.DateFieldType) ft;
        switch (propNum) {
            case 0: dft.setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); break;
            case 1: dft.setTimeUnit(TimeUnit.HOURS); break;
            default: super.modifyProperty(ft, propNum - 2);
        }
    }
}

org/elasticsearch/index/mapper/core/DoubleFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class DoubleFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new DoubleFieldMapper.DoubleFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10.0D;
    }
}

org/elasticsearch/index/mapper/core/FloatFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class FloatFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        // FloatFieldType, not DoubleFieldType: returning the double type in a
        // test named FloatFieldTypeTests was a copy-paste slip.
        return new FloatFieldMapper.FloatFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10.0f;
    }
}

org/elasticsearch/index/mapper/core/IntegerFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class IntegerFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new IntegerFieldMapper.IntegerFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return 10;
    }
}

org/elasticsearch/index/mapper/core/LongFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class LongFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new LongFieldMapper.LongFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (long) 10;
    }
}

org/elasticsearch/index/mapper/core/ShortFieldTypeTests.java
@@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class ShortFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new ShortFieldMapper.ShortFieldType();
    }

    @Override
    protected Object dummyNullValue() {
        return (short) 10;
    }
}

org/elasticsearch/index/mapper/core/StringFieldTypeTests.java
@@ -0,0 +1,29 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;

import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;

public class StringFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new StringFieldMapper.StringFieldType();
    }
}

Some files were not shown because too many files have changed in this diff.