Merge branch 'master' into feature/query-refactoring
Conflicts:
	core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java
	core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java
	core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java
commit fc1b178dc4

core/pom.xml
@@ -1024,6 +1024,50 @@
                     </execution>
                 </executions>
             </plugin>
+            <!-- integration tests -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <!-- start up external cluster -->
+                    <execution>
+                        <id>integ-setup</id>
+                        <phase>pre-integration-test</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <target>
+                                <ant antfile="${elasticsearch.tools.directory}/ant/integration-tests.xml"
+                                     target="start-external-cluster"/>
+                            </target>
+                        </configuration>
+                    </execution>
+                    <!-- shut down external cluster -->
+                    <execution>
+                        <id>integ-teardown</id>
+                        <phase>post-integration-test</phase>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                        <configuration>
+                            <target>
+                                <ant antfile="${elasticsearch.tools.directory}/ant/integration-tests.xml"
+                                     target="stop-external-cluster"/>
+                            </target>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <systemPropertyVariables>
+                        <tests.cluster>127.0.0.1:9300</tests.cluster>
+                    </systemPropertyVariables>
+                </configuration>
+            </plugin>
         </plugins>
         <pluginManagement>
             <plugins>

@@ -428,8 +428,7 @@ public class MapperQueryParser extends QueryParser {
         currentFieldType = parseContext.fieldMapper(field);
         if (currentFieldType != null) {
             try {
-                //LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
-                return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
+                return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
             } catch (RuntimeException e) {
                 if (settings.lenient()) {
                     return null;
@@ -444,8 +443,7 @@ public class MapperQueryParser extends QueryParser {
     protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
         String text = term.text();
         int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length()));
-        //LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
-        FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), false);
+        FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
         QueryParsers.setRewriteMethod(query, settings.fuzzyRewriteMethod());
         return query;
     }

@@ -30,7 +30,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.http.HttpInfo;
 import org.elasticsearch.monitor.jvm.JvmInfo;
-import org.elasticsearch.monitor.network.NetworkInfo;
 import org.elasticsearch.monitor.os.OsInfo;
 import org.elasticsearch.monitor.process.ProcessInfo;
 import org.elasticsearch.threadpool.ThreadPoolInfo;
@@ -65,9 +64,6 @@ public class NodeInfo extends BaseNodeResponse {
     @Nullable
     private ThreadPoolInfo threadPool;

-    @Nullable
-    private NetworkInfo network;
-
     @Nullable
     private TransportInfo transport;

@@ -81,7 +77,7 @@ public class NodeInfo extends BaseNodeResponse {
     }

     public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable ImmutableMap<String, String> serviceAttributes, @Nullable Settings settings,
-                    @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, @Nullable NetworkInfo network,
+                    @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
                     @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
         super(node);
         this.version = version;
@@ -92,7 +88,6 @@ public class NodeInfo extends BaseNodeResponse {
         this.process = process;
         this.jvm = jvm;
         this.threadPool = threadPool;
-        this.network = network;
         this.transport = transport;
         this.http = http;
         this.plugins = plugins;
@@ -165,14 +160,6 @@ public class NodeInfo extends BaseNodeResponse {
         return this.threadPool;
     }

-    /**
-     * Network level information.
-     */
-    @Nullable
-    public NetworkInfo getNetwork() {
-        return network;
-    }
-
     @Nullable
     public TransportInfo getTransport() {
         return transport;
@@ -222,9 +209,6 @@ public class NodeInfo extends BaseNodeResponse {
         if (in.readBoolean()) {
            threadPool = ThreadPoolInfo.readThreadPoolInfo(in);
         }
-        if (in.readBoolean()) {
-            network = NetworkInfo.readNetworkInfo(in);
-        }
         if (in.readBoolean()) {
             transport = TransportInfo.readTransportInfo(in);
         }
@@ -281,12 +265,6 @@
             out.writeBoolean(true);
             threadPool.writeTo(out);
         }
-        if (network == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            network.writeTo(out);
-        }
         if (transport == null) {
             out.writeBoolean(false);
         } else {
@@ -306,5 +284,4 @@
             plugins.writeTo(out);
         }
     }
-
 }

@@ -111,9 +111,6 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
             if (nodeInfo.getThreadPool() != null) {
                 nodeInfo.getThreadPool().toXContent(builder, params);
             }
-            if (nodeInfo.getNetwork() != null) {
-                nodeInfo.getNetwork().toXContent(builder, params);
-            }
             if (nodeInfo.getTransport() != null) {
                 nodeInfo.getTransport().toXContent(builder, params);
             }

@@ -29,9 +29,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.http.HttpStats;
 import org.elasticsearch.indices.NodeIndicesStats;
 import org.elasticsearch.indices.breaker.AllCircuitBreakerStats;
-import org.elasticsearch.monitor.fs.FsStats;
+import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.monitor.jvm.JvmStats;
-import org.elasticsearch.monitor.network.NetworkStats;
 import org.elasticsearch.monitor.os.OsStats;
 import org.elasticsearch.monitor.process.ProcessStats;
 import org.elasticsearch.threadpool.ThreadPoolStats;
@@ -63,10 +62,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
     private ThreadPoolStats threadPool;

-    @Nullable
-    private NetworkStats network;
-
     @Nullable
-    private FsStats fs;
+    private FsInfo fs;

     @Nullable
     private TransportStats transport;
@@ -82,7 +78,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {

     public NodeStats(DiscoveryNode node, long timestamp, @Nullable NodeIndicesStats indices,
                      @Nullable OsStats os, @Nullable ProcessStats process, @Nullable JvmStats jvm, @Nullable ThreadPoolStats threadPool,
-                     @Nullable NetworkStats network, @Nullable FsStats fs, @Nullable TransportStats transport, @Nullable HttpStats http,
+                     @Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http,
                      @Nullable AllCircuitBreakerStats breaker) {
         super(node);
         this.timestamp = timestamp;
@@ -91,7 +87,6 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         this.process = process;
         this.jvm = jvm;
         this.threadPool = threadPool;
-        this.network = network;
         this.fs = fs;
         this.transport = transport;
         this.http = http;
@@ -147,19 +142,11 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         return this.threadPool;
     }

-    /**
-     * Network level statistics.
-     */
-    @Nullable
-    public NetworkStats getNetwork() {
-        return network;
-    }
-
     /**
      * File system level stats.
      */
     @Nullable
-    public FsStats getFs() {
+    public FsInfo getFs() {
         return fs;
     }

@@ -204,10 +191,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
             threadPool = ThreadPoolStats.readThreadPoolStats(in);
         }
-        if (in.readBoolean()) {
-            network = NetworkStats.readNetworkStats(in);
-        }
         if (in.readBoolean()) {
-            fs = FsStats.readFsStats(in);
+            fs = FsInfo.readFsInfo(in);
         }
         if (in.readBoolean()) {
             transport = TransportStats.readTransportStats(in);
@@ -253,12 +237,6 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
             out.writeBoolean(true);
             threadPool.writeTo(out);
         }
-        if (network == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            network.writeTo(out);
-        }
         if (fs == null) {
             out.writeBoolean(false);
         } else {
@@ -313,9 +291,6 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         if (getThreadPool() != null) {
             getThreadPool().toXContent(builder, params);
         }
-        if (getNetwork() != null) {
-            getNetwork().toXContent(builder, params);
-        }
         if (getFs() != null) {
             getFs().toXContent(builder, params);
         }

@@ -36,7 +36,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
-import org.elasticsearch.monitor.fs.FsStats;
+import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 import org.elasticsearch.monitor.os.OsInfo;

@@ -52,7 +52,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
         private OsStats os;
         private ProcessStats process;
         private JvmStats jvm;
-        private FsStats.Info fs;
+        private FsInfo.Path fs;
         private Set<PluginInfo> plugins;

         private ClusterStatsNodes() {
@@ -63,7 +63,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
             this.versions = new HashSet<>();
             this.os = new OsStats();
             this.jvm = new JvmStats();
-            this.fs = new FsStats.Info();
+            this.fs = new FsInfo.Path();
             this.plugins = new HashSet<>();
             this.process = new ProcessStats();

@@ -116,7 +116,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
             return jvm;
         }

-        public FsStats.Info getFs() {
+        public FsInfo.Path getFs() {
             return fs;
         }

@@ -138,7 +138,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
             os = OsStats.readOsStats(in);
             process = ProcessStats.readStats(in);
             jvm = JvmStats.readJvmStats(in);
-            fs = FsStats.Info.readInfoFrom(in);
+            fs = FsInfo.Path.readInfoFrom(in);

             size = in.readVInt();
             plugins = new HashSet<>(size);

@@ -19,7 +19,6 @@

 package org.elasticsearch.action.bulk;

-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionWriteResponse;
 import org.elasticsearch.action.delete.DeleteResponse;
@@ -28,7 +27,6 @@ import org.elasticsearch.action.update.UpdateResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;

@@ -168,13 +168,13 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
             } else if (request instanceof DeleteRequest) {
                 DeleteRequest deleteRequest = (DeleteRequest) request;
                 if (index.equals(deleteRequest.index())) {
-                    responses.set(idx, new BulkItemResponse(idx, "index", new BulkItemResponse.Failure(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), e)));
+                    responses.set(idx, new BulkItemResponse(idx, "delete", new BulkItemResponse.Failure(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), e)));
                     return true;
                 }
             } else if (request instanceof UpdateRequest) {
                 UpdateRequest updateRequest = (UpdateRequest) request;
                 if (index.equals(updateRequest.index())) {
-                    responses.set(idx, new BulkItemResponse(idx, "index", new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), e)));
+                    responses.set(idx, new BulkItemResponse(idx, "update", new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), e)));
                     return true;
                 }
             } else {
@@ -379,7 +379,15 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
                 if (unavailableException != null) {
                     BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(),
                             unavailableException);
-                    BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, "index", failure);
+                    String operationType = "unknown";
+                    if (request instanceof IndexRequest) {
+                        operationType = "index";
+                    } else if (request instanceof DeleteRequest) {
+                        operationType = "delete";
+                    } else if (request instanceof UpdateRequest) {
+                        operationType = "update";
+                    }
+                    BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, operationType, failure);
                     responses.set(idx, bulkItemResponse);
                     // make sure the request gets never processed again
                     bulkRequest.requests.set(idx, null);

@@ -41,7 +41,9 @@ import org.elasticsearch.search.internal.InternalSearchHits;
 import org.elasticsearch.search.internal.InternalSearchResponse;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 import java.util.concurrent.atomic.AtomicInteger;

 import static org.elasticsearch.action.search.type.TransportSearchHelper.internalScrollSearchRequest;
@@ -159,7 +161,9 @@ public class TransportSearchScrollScanAction extends AbstractComponent {
             searchService.sendExecuteScan(node, internalScrollSearchRequest(searchId, request), new ActionListener<ScrollQueryFetchSearchResult>() {
                 @Override
                 public void onResponse(ScrollQueryFetchSearchResult result) {
-                    queryFetchResults.set(shardIndex, result.result());
+                    QueryFetchSearchResult shardResult = result.result();
+                    Objects.requireNonNull(shardResult, "QueryFetchSearchResult can't be null");
+                    queryFetchResults.setOnce(shardIndex, shardResult);
                     if (counter.decrementAndGet() == 0) {
                         finishHim();
                     }
@@ -197,25 +201,27 @@ public class TransportSearchScrollScanAction extends AbstractComponent {

     private void innerFinishHim() throws IOException {
         int numberOfHits = 0;
-        for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
+        List<AtomicArray.Entry<QueryFetchSearchResult>> entries = queryFetchResults.asList();
+        for (AtomicArray.Entry<QueryFetchSearchResult> entry : entries) {
             numberOfHits += entry.value.queryResult().topDocs().scoreDocs.length;
         }
-        ScoreDoc[] docs = new ScoreDoc[numberOfHits];
-        int counter = 0;
-        for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
+        List<ScoreDoc> docs = new ArrayList<>(numberOfHits);
+        for (AtomicArray.Entry<QueryFetchSearchResult> entry : entries) {
             ScoreDoc[] scoreDocs = entry.value.queryResult().topDocs().scoreDocs;
             for (ScoreDoc scoreDoc : scoreDocs) {
                 scoreDoc.shardIndex = entry.index;
-                docs[counter++] = scoreDoc;
+                docs.add(scoreDoc);
             }
         }
-        final InternalSearchResponse internalResponse = searchPhaseController.merge(docs, queryFetchResults, queryFetchResults);
+        final InternalSearchResponse internalResponse = searchPhaseController.merge(docs.toArray(new ScoreDoc[0]), queryFetchResults, queryFetchResults);
         ((InternalSearchHits) internalResponse.hits()).totalHits = Long.parseLong(this.scrollId.getAttributes().get("total_hits"));


-        for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
+        for (AtomicArray.Entry<QueryFetchSearchResult> entry : entries) {
             if (entry.value.queryResult().topDocs().scoreDocs.length < entry.value.queryResult().size()) {
-                // we found more than we want for this round, remove this from our scrolling
+                // we found more than we want for this round, remove this from our scrolling, so we don't go back to
+                // this shard, since all hits have been processed.
+                // The SearchContext already gets freed on the node holding the shard, via a similar check.
                 queryFetchResults.set(entry.index, null);
             }
         }

@@ -73,39 +73,41 @@ public class UpdateHelper extends AbstractComponent {
      */
     @SuppressWarnings("unchecked")
     public Result prepare(UpdateRequest request, IndexShard indexShard) {
-        long getDateNS = System.nanoTime();
         final GetResult getResult = indexShard.getService().get(request.type(), request.id(),
                 new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME},
                 true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE, false);
+        return prepare(request, getResult);
+    }
+
+    /**
+     * Prepares an update request by converting it into an index or delete request or an update response (no action).
+     */
+    @SuppressWarnings("unchecked")
+    protected Result prepare(UpdateRequest request, final GetResult getResult) {
+        long getDateNS = System.nanoTime();
         if (!getResult.isExists()) {
             if (request.upsertRequest() == null && !request.docAsUpsert()) {
-                throw new DocumentMissingException(new ShardId(indexShard.indexService().index().name(), request.shardId()), request.type(), request.id());
+                throw new DocumentMissingException(new ShardId(request.index(), request.shardId()), request.type(), request.id());
             }
-            Long ttl = null;
             IndexRequest indexRequest = request.docAsUpsert() ? request.doc() : request.upsertRequest();
-            if (request.scriptedUpsert() && (request.script() != null)) {
+            Long ttl = indexRequest.ttl();
+            if (request.scriptedUpsert() && request.script() != null) {
                 // Run the script to perform the create logic
                 IndexRequest upsert = request.upsertRequest();
                 Map<String, Object> upsertDoc = upsert.sourceAsMap();
                 Map<String, Object> ctx = new HashMap<>(2);
                 // Tell the script that this is a create and not an update
                 ctx.put("op", "create");
                 ctx.put("_source", upsertDoc);
-                try {
-                    ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
-                    script.setNextVar("ctx", ctx);
-                    script.run();
-                    // we need to unwrap the ctx...
-                    ctx = (Map<String, Object>) script.unwrap(ctx);
-                } catch (Exception e) {
-                    throw new IllegalArgumentException("failed to execute script", e);
-                }
+                ctx = executeScript(request, ctx);
                 //Allow the script to set TTL using ctx._ttl
-                ttl = getTTLFromScriptContext(ctx);
+                if (ttl < 0) {
+                    ttl = getTTLFromScriptContext(ctx);
+                }

                 //Allow the script to abort the create by setting "op" to "none"
                 String scriptOpChoice = (String) ctx.get("op");

                 // Only valid options for an upsert script are "create"
                 // (the default) or "none", meaning abort upsert
                 if (!"create".equals(scriptOpChoice)) {
@@ -123,8 +125,8 @@ public class UpdateHelper extends AbstractComponent {

             indexRequest.index(request.index()).type(request.type()).id(request.id())
                     // it has to be a "create!"
-                    .create(true)
-                    .ttl(ttl)
+                    .create(true)
+                    .ttl(ttl == null || ttl < 0 ? null : ttl)
                     .refresh(request.refresh())
                     .routing(request.routing())
                     .parent(request.parent())
@@ -146,7 +148,7 @@ public class UpdateHelper extends AbstractComponent {

         if (getResult.internalSourceRef() == null) {
             // no source, we can't do nothing, through a failure...
-            throw new DocumentSourceMissingException(new ShardId(indexShard.indexService().index().name(), request.shardId()), request.type(), request.id());
+            throw new DocumentSourceMissingException(new ShardId(request.index(), request.shardId()), request.type(), request.id());
         }

         Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true);
@@ -192,15 +194,7 @@ public class UpdateHelper extends AbstractComponent {
         ctx.put("_ttl", originalTtl);
         ctx.put("_source", sourceAndContent.v2());

-        try {
-            ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
-            script.setNextVar("ctx", ctx);
-            script.run();
-            // we need to unwrap the ctx...
-            ctx = (Map<String, Object>) script.unwrap(ctx);
-        } catch (Exception e) {
-            throw new IllegalArgumentException("failed to execute script", e);
-        }
+        ctx = executeScript(request, ctx);

         operation = (String) ctx.get("op");

@@ -213,7 +207,7 @@ public class UpdateHelper extends AbstractComponent {
         }

         ttl = getTTLFromScriptContext(ctx);


         updatedSourceAsMap = (Map<String, Object>) ctx.get("_source");
     }

@@ -243,7 +237,7 @@ public class UpdateHelper extends AbstractComponent {
             return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType);
         } else if ("none".equals(operation)) {
             UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false);
-            update.setGetResult(extractGetResult(request, indexShard.indexService().index().name(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
+            update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
             return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType);
         } else {
             logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript());
@@ -252,6 +246,21 @@ public class UpdateHelper extends AbstractComponent {
         }
     }

+    private Map<String, Object> executeScript(UpdateRequest request, Map<String, Object> ctx) {
+        try {
+            if (scriptService != null) {
+                ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
+                script.setNextVar("ctx", ctx);
+                script.run();
+                // we need to unwrap the ctx...
+                ctx = (Map<String, Object>) script.unwrap(ctx);
+            }
+        } catch (Exception e) {
+            throw new IllegalArgumentException("failed to execute script", e);
+        }
+        return ctx;
+    }
+
     private Long getTTLFromScriptContext(Map<String, Object> ctx) {
         Long ttl = null;
         Object fetchedTTL = ctx.get("_ttl");

@@ -690,16 +690,18 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return this.docAsUpsert;
     }

-    public void docAsUpsert(boolean shouldUpsertDoc) {
+    public UpdateRequest docAsUpsert(boolean shouldUpsertDoc) {
         this.docAsUpsert = shouldUpsertDoc;
+        return this;
     }

     public boolean scriptedUpsert(){
         return this.scriptedUpsert;
     }

-    public void scriptedUpsert(boolean scriptedUpsert) {
+    public UpdateRequest scriptedUpsert(boolean scriptedUpsert) {
         this.scriptedUpsert = scriptedUpsert;
+        return this;
     }


@@ -81,10 +81,10 @@ public class JarHell {
         // a "list" at all. So just exclude any elements underneath the java home
         String javaHome = System.getProperty("java.home");
         logger.debug("java.home: {}", javaHome);
-        final Map<String,URL> clazzes = new HashMap<>(32768);
-        Set<String> seenJars = new HashSet<>();
+        final Map<String,Path> clazzes = new HashMap<>(32768);
+        Set<Path> seenJars = new HashSet<>();
         for (final URL url : urls) {
-            String path = URLDecoder.decode(url.getPath(), "UTF-8");
+            final Path path = PathUtils.get(url.toURI());
             // exclude system resources
             if (path.startsWith(javaHome)) {
                 logger.debug("excluding system resource: {}", path);
@@ -96,7 +96,7 @@ public class JarHell {
                 continue; // we can't fail because of sheistiness with joda-time
             }
             logger.debug("examining jar: {}", path);
-            try (JarFile file = new JarFile(path)) {
+            try (JarFile file = new JarFile(path.toString())) {
                 Manifest manifest = file.getManifest();
                 if (manifest != null) {
                     // inspect Manifest: give a nice error if jar requires a newer java version
@@ -124,7 +124,7 @@ public class JarHell {
                     if (entry.endsWith(".class")) {
                         // for jar format, the separator is defined as /
                         entry = entry.replace('/', '.').substring(0, entry.length() - 6);
-                        checkClass(clazzes, entry, url);
+                        checkClass(clazzes, entry, path);
                     }
                 }
             }
@@ -140,7 +140,7 @@ public class JarHell {
                     if (entry.endsWith(".class")) {
                         // normalize with the os separator
                         entry = entry.replace(sep, ".").substring(0, entry.length() - 6);
-                        checkClass(clazzes, entry, url);
+                        checkClass(clazzes, entry, path);
                     }
                     return super.visitFile(file, attrs);
                 }
@@ -148,21 +148,32 @@ public class JarHell {
             }
         }
     }

-    @SuppressForbidden(reason = "proper use of URL to reduce noise")
-    static void checkClass(Map<String,URL> clazzes, String clazz, URL url) {
-        if (clazz.startsWith("org.apache.log4j")) {
-            return; // go figure, jar hell for what should be System.out.println...
-        }
-        if (clazz.equals("org.joda.time.base.BaseDateTime")) {
-            return; // apparently this is intentional... clean this up
-        }
-        URL previous = clazzes.put(clazz, url);
+    static void checkClass(Map<String,Path> clazzes, String clazz, Path jarpath) {
+        Path previous = clazzes.put(clazz, jarpath);
         if (previous != null) {
-            throw new IllegalStateException("jar hell!" + System.lineSeparator() +
-                    "class: " + clazz + System.lineSeparator() +
-                    "jar1: " + previous.getPath() + System.lineSeparator() +
-                    "jar2: " + url.getPath());
+            if (previous.equals(jarpath)) {
+                if (clazz.startsWith("org.apache.xmlbeans")) {
+                    return; // https://issues.apache.org/jira/browse/XMLBEANS-499
+                }
+                // throw a better exception in this ridiculous case.
+                // unfortunately the zip file format allows this buggy possibility
+                // UweSays: It can, but should be considered as bug :-)
+                throw new IllegalStateException("jar hell!" + System.lineSeparator() +
+                        "class: " + clazz + System.lineSeparator() +
+                        "exists multiple times in jar: " + jarpath + " !!!!!!!!!");
+            } else {
+                if (clazz.startsWith("org.apache.log4j")) {
+                    return; // go figure, jar hell for what should be System.out.println...
+                }
+                if (clazz.equals("org.joda.time.base.BaseDateTime")) {
+                    return; // apparently this is intentional... clean this up
+                }
+                throw new IllegalStateException("jar hell!" + System.lineSeparator() +
+                        "class: " + clazz + System.lineSeparator() +
+                        "jar1: " + previous + System.lineSeparator() +
+                        "jar2: " + jarpath);
+            }
         }
     }
 }

@@ -80,6 +80,7 @@ final class Security {
         m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
         m.put(Pattern.compile(".*jsr166e-.*\\.jar$"), "es.security.jar.twitter.jsr166e");
         m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
+        m.put(Pattern.compile(".*bcprov-.*\\.jar$"), "es.security.jar.bouncycastle.bcprov");
         SPECIAL_JARS = Collections.unmodifiableMap(m);
     }

@@ -96,11 +97,9 @@ final class Security {
         for (Map.Entry<Pattern,String> e : SPECIAL_JARS.entrySet()) {
             if (e.getKey().matcher(url.getPath()).matches()) {
                 String prop = e.getValue();
-                // TODO: we need to fix plugins to not include duplicate e.g. lucene-core jars,
-                // to add back this safety check! see https://github.com/elastic/elasticsearch/issues/11647
-                // if (System.getProperty(prop) != null) {
-                //     throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
-                //}
+                if (System.getProperty(prop) != null) {
+                    throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
+                }
                 System.setProperty(prop, url.toString());
             }
         }

@@ -39,7 +39,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
-import org.elasticsearch.monitor.fs.FsStats;
+import org.elasticsearch.monitor.fs.FsInfo;
 import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.ReceiveTimeoutTransportException;
@@ -320,7 +320,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
             long available = 0;
             long total = 0;

-            for (FsStats.Info info : nodeStats.getFs()) {
+            for (FsInfo.Path info : nodeStats.getFs()) {
                 available += info.getAvailable().bytes();
                 total += info.getTotal().bytes();
             }

@@ -167,6 +167,7 @@ public class IndexMetaData implements Diffable<IndexMetaData> {
     public static final String SETTING_VERSION_UPGRADED_STRING = "index.version.upgraded_string";
     public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible";
     public static final String SETTING_CREATION_DATE = "index.creation_date";
+    public static final String SETTING_PRIORITY = "index.priority";
     public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string";
     public static final String SETTING_UUID = "index.uuid";
     public static final String SETTING_LEGACY_ROUTING_HASH_FUNCTION = "index.legacy.routing.hash.type";

@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -578,6 +579,10 @@ public class RoutingNodes implements Iterable<RoutingNode> {
             }
         }

+        public void sort(Comparator<ShardRouting> comparator) {
+            CollectionUtil.timSort(unassigned, comparator);
+        }
+
         public int size() {
             return unassigned.size();
         }

@@ -118,6 +118,8 @@ public class Joda {
             formatter = ISODateTimeFormat.ordinalDateTimeNoMillis();
         } else if ("time".equals(input)) {
             formatter = ISODateTimeFormat.time();
+        } else if ("timeNoMillis".equals(input) || "time_no_millis".equals(input)) {
+            formatter = ISODateTimeFormat.timeNoMillis();
         } else if ("tTime".equals(input) || "t_time".equals(input)) {
             formatter = ISODateTimeFormat.tTime();
         } else if ("tTimeNoMillis".equals(input) || "t_time_no_millis".equals(input)) {
@@ -126,10 +128,14 @@ public class Joda {
             formatter = ISODateTimeFormat.weekDate();
         } else if ("weekDateTime".equals(input) || "week_date_time".equals(input)) {
             formatter = ISODateTimeFormat.weekDateTime();
+        } else if ("weekDateTimeNoMillis".equals(input) || "week_date_time_no_millis".equals(input)) {
+            formatter = ISODateTimeFormat.weekDateTimeNoMillis();
         } else if ("weekyear".equals(input) || "week_year".equals(input)) {
             formatter = ISODateTimeFormat.weekyear();
-        } else if ("weekyearWeek".equals(input)) {
+        } else if ("weekyearWeek".equals(input) || "weekyear_week".equals(input)) {
             formatter = ISODateTimeFormat.weekyearWeek();
+        } else if ("weekyearWeekDay".equals(input) || "weekyear_week_day".equals(input)) {
+            formatter = ISODateTimeFormat.weekyearWeekDay();
         } else if ("year".equals(input)) {
             formatter = ISODateTimeFormat.year();
         } else if ("yearMonth".equals(input) || "year_month".equals(input)) {
@@ -140,6 +146,77 @@ public class Joda {
             formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter();
         } else if ("epoch_millis".equals(input)) {
             formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter();
+        // strict date formats here, must be at least 4 digits for year and two for months and two for day
+        } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) {
+            formatter = StrictISODateTimeFormat.basicWeekDate();
+        } else if ("strictBasicWeekDateTime".equals(input) || "strict_basic_week_date_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.basicWeekDateTime();
+        } else if ("strictBasicWeekDateTimeNoMillis".equals(input) || "strict_basic_week_date_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.basicWeekDateTimeNoMillis();
+        } else if ("strictDate".equals(input) || "strict_date".equals(input)) {
+            formatter = StrictISODateTimeFormat.date();
+        } else if ("strictDateHour".equals(input) || "strict_date_hour".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateHour();
+        } else if ("strictDateHourMinute".equals(input) || "strict_date_hour_minute".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateHourMinute();
+        } else if ("strictDateHourMinuteSecond".equals(input) || "strict_date_hour_minute_second".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateHourMinuteSecond();
+        } else if ("strictDateHourMinuteSecondFraction".equals(input) || "strict_date_hour_minute_second_fraction".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateHourMinuteSecondFraction();
+        } else if ("strictDateHourMinuteSecondMillis".equals(input) || "strict_date_hour_minute_second_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateHourMinuteSecondMillis();
+        } else if ("strictDateOptionalTime".equals(input) || "strict_date_optional_time".equals(input)) {
+            // in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print
+            // this sucks we should use the root local by default and not be dependent on the node
+            return new FormatDateTimeFormatter(input,
+                    StrictISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC),
+                    StrictISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC), locale);
+        } else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateTime();
+        } else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.dateTimeNoMillis();
+        } else if ("strictHour".equals(input) || "strict_hour".equals(input)) {
+            formatter = StrictISODateTimeFormat.hour();
+        } else if ("strictHourMinute".equals(input) || "strict_hour_minute".equals(input)) {
+            formatter = StrictISODateTimeFormat.hourMinute();
+        } else if ("strictHourMinuteSecond".equals(input) || "strict_hour_minute_second".equals(input)) {
+            formatter = StrictISODateTimeFormat.hourMinuteSecond();
+        } else if ("strictHourMinuteSecondFraction".equals(input) || "strict_hour_minute_second_fraction".equals(input)) {
+            formatter = StrictISODateTimeFormat.hourMinuteSecondFraction();
+        } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.hourMinuteSecondMillis();
+        } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) {
+            formatter = StrictISODateTimeFormat.ordinalDate();
+        } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.ordinalDateTime();
+        } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.ordinalDateTimeNoMillis();
+        } else if ("strictTime".equals(input) || "strict_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.time();
+        } else if ("strictTimeNoMillis".equals(input) || "strict_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.timeNoMillis();
+        } else if ("strictTTime".equals(input) || "strict_t_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.tTime();
+        } else if ("strictTTimeNoMillis".equals(input) || "strict_t_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.tTimeNoMillis();
+        } else if ("strictWeekDate".equals(input) || "strict_week_date".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekDate();
+        } else if ("strictWeekDateTime".equals(input) || "strict_week_date_time".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekDateTime();
+        } else if ("strictWeekDateTimeNoMillis".equals(input) || "strict_week_date_time_no_millis".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekDateTimeNoMillis();
+        } else if ("strictWeekyear".equals(input) || "strict_weekyear".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekyear();
+        } else if ("strictWeekyearWeek".equals(input) || "strict_weekyear_week".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekyearWeek();
+        } else if ("strictWeekyearWeekDay".equals(input) || "strict_weekyear_week_day".equals(input)) {
+            formatter = StrictISODateTimeFormat.weekyearWeekDay();
+        } else if ("strictYear".equals(input) || "strict_year".equals(input)) {
+            formatter = StrictISODateTimeFormat.year();
+        } else if ("strictYearMonth".equals(input) || "strict_year_month".equals(input)) {
+            formatter = StrictISODateTimeFormat.yearMonth();
+        } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) {
+            formatter = StrictISODateTimeFormat.yearMonthDay();
         } else if (Strings.hasLength(input) && input.contains("||")) {
             String[] formats = Strings.delimitedListToStringArray(input, "||");
             DateTimeParser[] parsers = new DateTimeParser[formats.length];
@@ -171,6 +248,38 @@ public class Joda {
         return new FormatDateTimeFormatter(input, formatter.withZone(DateTimeZone.UTC), locale);
     }

+    public static FormatDateTimeFormatter getStrictStandardDateFormatter() {
+        // 2014/10/10
+        DateTimeFormatter shortFormatter = new DateTimeFormatterBuilder()
+                .appendFixedDecimal(DateTimeFieldType.year(), 4)
+                .appendLiteral('/')
+                .appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2)
+                .appendLiteral('/')
+                .appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2)
+                .toFormatter()
+                .withZoneUTC();
+
+        // 2014/10/10 12:12:12
+        DateTimeFormatter longFormatter = new DateTimeFormatterBuilder()
+                .appendFixedDecimal(DateTimeFieldType.year(), 4)
+                .appendLiteral('/')
+                .appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2)
+                .appendLiteral('/')
+                .appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2)
+                .appendLiteral(' ')
+                .appendFixedSignedDecimal(DateTimeFieldType.hourOfDay(), 2)
+                .appendLiteral(':')
+                .appendFixedSignedDecimal(DateTimeFieldType.minuteOfHour(), 2)
+                .appendLiteral(':')
+                .appendFixedSignedDecimal(DateTimeFieldType.secondOfMinute(), 2)
+                .toFormatter()
+                .withZoneUTC();
+
+        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[] {longFormatter.getParser(), shortFormatter.getParser()});
+
+        return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
+    }
+

     public static final DurationFieldType Quarters = new DurationFieldType("quarters") {
         private static final long serialVersionUID = -8167713675442491871L;

@@ -39,10 +39,7 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.*;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.TFIDFSimilarity;
 import org.apache.lucene.util.BytesRef;
@@ -855,9 +852,12 @@ public final class XMoreLikeThis {
                 continue;
             }

-            PostingsEnum docs = termsEnum.postings(null, null);
-            final int freq = docs.freq();
-
+            final PostingsEnum docs = termsEnum.postings(null, null);
+            int freq = 0;
+            while(docs != null && docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+                freq += docs.freq();
+            }
+
             // increment frequency
             Int cnt = termFreqMap.get(term);
             if (cnt == null) {

@@ -22,6 +22,7 @@ package org.elasticsearch.common.settings;
 import com.google.common.base.Charsets;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSortedMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -81,7 +82,9 @@ public final class Settings implements ToXContent {
     private transient ClassLoader classLoader;

     Settings(Map<String, String> settings, ClassLoader classLoader) {
-        this.settings = ImmutableMap.copyOf(settings);
+        // we use a sorted map for consistent serialization when using getAsMap()
+        // TODO: use Collections.unmodifiableMap with a TreeMap
+        this.settings = ImmutableSortedMap.copyOf(settings);
         Map<String, String> forcedUnderscoreSettings = null;
         for (Map.Entry<String, String> entry : settings.entrySet()) {
             String toUnderscoreCase = Strings.toUnderscoreCase(entry.getKey());

@@ -67,6 +67,15 @@ public class AtomicArray<E> {
         }
     }

+    public final void setOnce(int i, E value) {
+        if (array.compareAndSet(i, null, value) == false) {
+            throw new IllegalStateException("index [" + i + "] has already been set");
+        }
+        if (nonNullList != null) { // read first, lighter, and most times it will be null...
+            nonNullList = null;
+        }
+    }
+
     /**
      * Gets the current value at position {@code i}.
      *

@@ -41,8 +41,8 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.FsDirectoryService;
-import org.elasticsearch.monitor.fs.FsStats;
-import org.elasticsearch.monitor.fs.JmxFsProbe;
+import org.elasticsearch.monitor.fs.FsInfo;
+import org.elasticsearch.monitor.fs.FsProbe;

 import java.io.Closeable;
 import java.io.IOException;
@@ -225,38 +225,37 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
                     spinsDesc = "no";
                 }

-                FsStats.Info fsInfo = JmxFsProbe.getFSInfo(nodePath);
+                FsInfo.Path fsPath = FsProbe.getFSInfo(nodePath);
                 sb.append(", free_space [")
-                    .append(fsInfo.getFree())
+                    .append(fsPath.getFree())
                     .append("], usable_space [")
-                    .append(fsInfo.getAvailable())
+                    .append(fsPath.getAvailable())
                     .append("], total_space [")
-                    .append(fsInfo.getTotal())
+                    .append(fsPath.getTotal())
                     .append("], spins? [")
                     .append(spinsDesc)
                     .append("], mount [")
-                    .append(fsInfo.getMount())
+                    .append(fsPath.getMount())
                     .append("], type [")
-                    .append(fsInfo.getType())
+                    .append(fsPath.getType())
                     .append(']');
             }
             logger.debug(sb.toString());
         } else if (logger.isInfoEnabled()) {
-            FsStats.Info totFSInfo = new FsStats.Info();
+            FsInfo.Path totFSPath = new FsInfo.Path();
             Set<String> allTypes = new HashSet<>();
             Set<String> allSpins = new HashSet<>();
             Set<String> allMounts = new HashSet<>();
             for (NodePath nodePath : nodePaths) {
-                // TODO: can/should I use the chosen FsProbe instead (i.e. sigar if it's available)?
-                FsStats.Info fsInfo = JmxFsProbe.getFSInfo(nodePath);
-                String mount = fsInfo.getMount();
+                FsInfo.Path fsPath = FsProbe.getFSInfo(nodePath);
+                String mount = fsPath.getMount();
                 if (allMounts.contains(mount) == false) {
                     allMounts.add(mount);
-                    String type = fsInfo.getType();
+                    String type = fsPath.getType();
                     if (type != null) {
                         allTypes.add(type);
                     }
-                    Boolean spins = fsInfo.getSpins();
+                    Boolean spins = fsPath.getSpins();
                     if (spins == null) {
                         allSpins.add("unknown");
                     } else if (spins.booleanValue()) {
@@ -264,7 +263,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
                     } else {
                         allSpins.add("no");
                     }
-                    totFSInfo.add(fsInfo);
+                    totFSPath.add(fsPath);
                 }
             }

@@ -273,8 +272,8 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
                 "using [%d] data paths, mounts [%s], net usable_space [%s], net total_space [%s], spins? [%s], types [%s]",
                 nodePaths.length,
                 allMounts,
-                totFSInfo.getAvailable(),
-                totFSInfo.getTotal(),
+                totFSPath.getAvailable(),
+                totFSPath.getTotal(),
                 toString(allSpins),
                 toString(allTypes)));
         }

@@ -147,8 +147,17 @@ public class GatewayAllocator extends AbstractComponent {
         RoutingNodes routingNodes = allocation.routingNodes();

         // First, handle primaries, they must find a place to be allocated on here
-        MetaData metaData = routingNodes.metaData();
-        Iterator<ShardRouting> unassignedIterator = routingNodes.unassigned().iterator();
+        final MetaData metaData = routingNodes.metaData();
+        RoutingNodes.UnassignedShards unassigned = routingNodes.unassigned();
+        unassigned.sort(new PriorityComparator() {
+
+            @Override
+            protected Settings getIndexSettings(String index) {
+                IndexMetaData indexMetaData = metaData.index(index);
+                return indexMetaData.getSettings();
+            }
+        }); // sort for priority ordering
+        Iterator<ShardRouting> unassignedIterator = unassigned.iterator();
         while (unassignedIterator.hasNext()) {
             ShardRouting shard = unassignedIterator.next();

@@ -368,7 +377,7 @@ public class GatewayAllocator extends AbstractComponent {
         }

         // Now, handle replicas, try to assign them to nodes that are similar to the one the primary was allocated on
-        unassignedIterator = routingNodes.unassigned().iterator();
+        unassignedIterator = unassigned.iterator();
         while (unassignedIterator.hasNext()) {
             ShardRouting shard = unassignedIterator.next();
             if (shard.primary()) {
@@ -542,4 +551,5 @@ public class GatewayAllocator extends AbstractComponent {
             routingService.reroute("async_shard_fetch");
         }
     }
+
 }

@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gateway;
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.common.settings.Settings;
+
+import java.util.Comparator;
+
+/**
+ * A comparator that compares ShardRouting based on it's indexes priority (index.priority),
+ * it's creation date (index.creation_date), or eventually by it's index name in reverse order.
+ * We try to recover first shards from an index with the highest priority, if that's the same
+ * we try to compare the timestamp the index is created and pick the newer first (time-based indices,
+ * here the newer indices matter more). If even that is the same, we compare the index name which is useful
+ * if the date is baked into the index name. ie logstash-2015.05.03.
+ */
+abstract class PriorityComparator implements Comparator<ShardRouting> {
+
+    @Override
+    public final int compare(ShardRouting o1, ShardRouting o2) {
+        final String o1Index = o1.index();
+        final String o2Index = o2.index();
+        int cmp = 0;
+        if (o1Index.equals(o2Index) == false) {
+            final Settings settingsO1 = getIndexSettings(o1Index);
+            final Settings settingsO2 = getIndexSettings(o2Index);
+            cmp = Long.compare(priority(settingsO2), priority(settingsO1));
+            if (cmp == 0) {
+                cmp = Long.compare(timeCreated(settingsO2), timeCreated(settingsO1));
+                if (cmp == 0) {
+                    cmp = o2Index.compareTo(o1Index);
+                }
+            }
+        }
+        return cmp;
+    }
+
+    private int priority(Settings settings) {
+        return settings.getAsInt(IndexMetaData.SETTING_PRIORITY, 1);
+    }
+
+    private long timeCreated(Settings settings) {
+        return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1l);
+    }
+
+    protected abstract Settings getIndexSettings(String index);
+}

@@ -510,26 +510,17 @@
      */
     public abstract SnapshotIndexCommit snapshotIndex(boolean flushFirst) throws EngineException;

-    /** fail engine due to some error. the engine will also be closed. */
-    public void failEngine(String reason, Throwable failure) {
-        assert failure != null;
+    /**
+     * fail engine due to some error. the engine will also be closed.
+     * The underlying store is marked corrupted iff failure is caused by index corruption
+     */
+    public void failEngine(String reason, @Nullable Throwable failure) {
         if (failEngineLock.tryLock()) {
             store.incRef();
             try {
                 try {
                     // we just go and close this engine - no way to recover
                     closeNoLock("engine failed on: [" + reason + "]");
-                    // we first mark the store as corrupted before we notify any listeners
-                    // this must happen first otherwise we might try to reallocate so quickly
-                    // on the same node that we don't see the corrupted marker file when
-                    // the shard is initializing
-                    if (Lucene.isCorruptionException(failure)) {
-                        try {
-                            store.markStoreCorrupted(ExceptionsHelper.unwrapCorruption(failure));
-                        } catch (IOException e) {
-                            logger.warn("Couldn't marks store corrupted", e);
-                        }
-                    }
                 } finally {
                     if (failedEngine != null) {
                         logger.debug("tried to fail engine but engine is already failed. ignoring. [{}]", reason, failure);
@@ -537,7 +528,18 @@
                 }
                 logger.warn("failed engine [{}]", failure, reason);
-                failedEngine = failure;
+                // we must set a failure exception, generate one if not supplied
+                failedEngine = (failure != null) ? failure : new IllegalStateException(reason);
+                // we first mark the store as corrupted before we notify any listeners
+                // this must happen first otherwise we might try to reallocate so quickly
+                // on the same node that we don't see the corrupted marker file when
+                // the shard is initializing
+                if (Lucene.isCorruptionException(failure)) {
+                    try {
+                        store.markStoreCorrupted(new IOException("failed engine (reason: [" + reason + "])", ExceptionsHelper.unwrapCorruption(failure)));
+                    } catch (IOException e) {
+                        logger.warn("Couldn't mark store corrupted", e);
+                    }
+                }
                 failedEngineListener.onFailedEngine(shardId, reason, failure);
             }
         } catch (Throwable t) {
@@ -554,10 +556,10 @@
     /** Check whether the engine should be failed */
     protected boolean maybeFailEngine(String source, Throwable t) {
         if (Lucene.isCorruptionException(t)) {
-            failEngine("corrupt file detected source: [" + source + "]", t);
+            failEngine("corrupt file (source: [" + source + "])", t);
             return true;
         } else if (ExceptionsHelper.isOOM(t)) {
-            failEngine("out of memory", t);
+            failEngine("out of memory (source: [" + source + "])", t);
             return true;
         }
         return false;

@@ -399,10 +399,10 @@ public class DocumentMapper implements ToXContent {
         return mapperService.getParentTypes().contains(type);
     }

-    private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
+    private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
         assert mappingLock.isWriteLockedByCurrentThread();
         // first ensure we don't have any incompatible new fields
-        mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, true);
+        mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, updateAllTypes);

         // update mappers for this document type
         MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
@ -424,7 +424,7 @@ public class DocumentMapper implements ToXContent {
|
|||
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
|
||||
this.mapping.merge(mapping, mergeResult);
|
||||
if (simulate == false) {
|
||||
addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers());
|
||||
addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers(), updateAllTypes);
|
||||
refreshSource();
|
||||
}
|
||||
return mergeResult;
|
||||
|
|
|
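The updateAllTypes flag now threads from merge through addMappers into checkNewMappersCompatibility instead of being hard-coded to true. The surrounding merge flow is a simulate-then-apply pattern; here is a hedged sketch of how a caller might drive it, assuming MergeResult exposes hasConflicts() and buildConflicts() as it does elsewhere in this code base (docMapper and newMapping are illustrative names):

// Dry run: collect conflicts only; addMappers(...) is not called.
MergeResult simulated = docMapper.merge(newMapping, true, updateAllTypes);
if (simulated.hasConflicts()) {
    throw new IllegalArgumentException("mapping merge conflicts: "
            + java.util.Arrays.toString(simulated.buildConflicts()));
}
// Real run: simulate == false, so addMappers(...) and refreshSource() execute.
docMapper.merge(newMapping, false, updateAllTypes);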
@@ -19,45 +19,725 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;

public abstract class FieldMapper extends Mapper {

    public FieldMapper(String simpleName) {
        super(simpleName);
    public abstract static class Builder<T extends Builder, Y extends FieldMapper> extends Mapper.Builder<T, Y> {

        protected final MappedFieldType fieldType;
        protected final MappedFieldType defaultFieldType;
        private final IndexOptions defaultOptions;
        protected boolean omitNormsSet = false;
        protected String indexName;
        protected Boolean includeInAll;
        protected boolean indexOptionsSet = false;
        protected boolean docValuesSet = false;
        @Nullable
        protected Settings fieldDataSettings;
        protected final MultiFields.Builder multiFieldsBuilder;
        protected CopyTo copyTo;

        protected Builder(String name, MappedFieldType fieldType) {
            super(name);
            this.fieldType = fieldType.clone();
            this.defaultFieldType = fieldType.clone();
            this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable
            multiFieldsBuilder = new MultiFields.Builder();
        }

        public MappedFieldType fieldType() {
            return fieldType;
        }

        public T index(boolean index) {
            if (index) {
                if (fieldType.indexOptions() == IndexOptions.NONE) {
                    /*
                     * the logic here is to reset to the default options only if we are not indexed ie. options are null
                     * if the fieldType has a non-null option we are all good it might have been set through a different
                     * call.
                     */
                    final IndexOptions options = getDefaultIndexOption();
                    assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing";
                    fieldType.setIndexOptions(options);
                }
            } else {
                fieldType.setIndexOptions(IndexOptions.NONE);
            }
            return builder;
        }

        protected IndexOptions getDefaultIndexOption() {
            return defaultOptions;
        }

        public T store(boolean store) {
            this.fieldType.setStored(store);
            return builder;
        }

        public T docValues(boolean docValues) {
            this.fieldType.setHasDocValues(docValues);
            this.docValuesSet = true;
            return builder;
        }

        public T storeTermVectors(boolean termVectors) {
            if (termVectors != this.fieldType.storeTermVectors()) {
                this.fieldType.setStoreTermVectors(termVectors);
            } // don't set it to false, it is default and might be flipped by a more specific option
            return builder;
        }

        public T storeTermVectorOffsets(boolean termVectorOffsets) {
            if (termVectorOffsets) {
                this.fieldType.setStoreTermVectors(termVectorOffsets);
            }
            this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
            return builder;
        }

        public T storeTermVectorPositions(boolean termVectorPositions) {
            if (termVectorPositions) {
                this.fieldType.setStoreTermVectors(termVectorPositions);
            }
            this.fieldType.setStoreTermVectorPositions(termVectorPositions);
            return builder;
        }

        public T storeTermVectorPayloads(boolean termVectorPayloads) {
            if (termVectorPayloads) {
                this.fieldType.setStoreTermVectors(termVectorPayloads);
            }
            this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
            return builder;
        }

        public T tokenized(boolean tokenized) {
            this.fieldType.setTokenized(tokenized);
            return builder;
        }

        public T boost(float boost) {
            this.fieldType.setBoost(boost);
            return builder;
        }

        public T omitNorms(boolean omitNorms) {
            this.fieldType.setOmitNorms(omitNorms);
            this.omitNormsSet = true;
            return builder;
        }

        public T indexOptions(IndexOptions indexOptions) {
            this.fieldType.setIndexOptions(indexOptions);
            this.indexOptionsSet = true;
            return builder;
        }

        public T indexName(String indexName) {
            this.indexName = indexName;
            return builder;
        }

        public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
            this.fieldType.setIndexAnalyzer(indexAnalyzer);
            return builder;
        }

        public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
            this.fieldType.setSearchAnalyzer(searchAnalyzer);
            return builder;
        }

        public T includeInAll(Boolean includeInAll) {
            this.includeInAll = includeInAll;
            return builder;
        }

        public T similarity(SimilarityProvider similarity) {
            this.fieldType.setSimilarity(similarity);
            return builder;
        }

        public T normsLoading(MappedFieldType.Loading normsLoading) {
            this.fieldType.setNormsLoading(normsLoading);
            return builder;
        }

        public T fieldDataSettings(Settings settings) {
            this.fieldDataSettings = settings;
            return builder;
        }

        public Builder nullValue(Object nullValue) {
            this.fieldType.setNullValue(nullValue);
            return this;
        }

        public T multiFieldPathType(ContentPath.Type pathType) {
            multiFieldsBuilder.pathType(pathType);
            return builder;
        }

        public T addMultiField(Mapper.Builder mapperBuilder) {
            multiFieldsBuilder.add(mapperBuilder);
            return builder;
        }

        public T copyTo(CopyTo copyTo) {
            this.copyTo = copyTo;
            return builder;
        }

        protected MappedFieldType.Names buildNames(BuilderContext context) {
            return new MappedFieldType.Names(buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
        }

        protected String buildIndexName(BuilderContext context) {
            if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
                return buildFullName(context);
            }
            String actualIndexName = indexName == null ? name : indexName;
            return context.path().pathAsText(actualIndexName);
        }

        protected String buildIndexNameClean(BuilderContext context) {
            if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
                return buildFullName(context);
            }
            return indexName == null ? name : indexName;
        }

        protected String buildFullName(BuilderContext context) {
            return context.path().fullPathAsText(name);
        }

        protected void setupFieldType(BuilderContext context) {
            fieldType.setNames(buildNames(context));
            if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
                fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
                fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            }
            if (fieldDataSettings != null) {
                Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
                fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
            }
            boolean defaultDocValues = false; // pre 2.0
            if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
                defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
            }
            // backcompat for "fielddata: format: docvalues" for now...
            boolean fieldDataDocValues = fieldType.fieldDataType() != null
                && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings()));
            if (fieldDataDocValues && docValuesSet && fieldType.hasDocValues() == false) {
                // this forces the doc_values setting to be written, so fielddata does not mask the original setting
                defaultDocValues = true;
            }
            defaultFieldType.setHasDocValues(defaultDocValues);
            if (docValuesSet == false) {
                fieldType.setHasDocValues(defaultDocValues || fieldDataDocValues);
            }
        }
    }

    public abstract MappedFieldType fieldType();
    protected MappedFieldTypeReference fieldTypeRef;
    protected final MappedFieldType defaultFieldType;
    protected final MultiFields multiFields;
    protected CopyTo copyTo;
    protected final boolean indexCreatedBefore2x;

    protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName);
        assert indexSettings != null;
        this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
        this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type
        defaultFieldType.freeze();
        this.defaultFieldType = defaultFieldType;
        this.multiFields = multiFields;
        this.copyTo = copyTo;
    }

    @Override
    public String name() {
        return fieldType().names().fullName();
    }

    public MappedFieldType fieldType() {
        return fieldTypeRef.get();
    }

    /** Returns a reference to the MappedFieldType for this mapper. */
    public abstract MappedFieldTypeReference fieldTypeReference();
    public MappedFieldTypeReference fieldTypeReference() {
        return fieldTypeRef;
    }

    /**
     * Updates the reference to this field's MappedFieldType.
     * Implementations should assert equality of the underlying field type
     */
    public abstract void setFieldTypeReference(MappedFieldTypeReference ref);
    public void setFieldTypeReference(MappedFieldTypeReference ref) {
        if (ref.get().equals(fieldType()) == false) {
            throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
        }
        ref.incrementAssociatedMappers();
        this.fieldTypeRef = ref;
    }

    /**
     * List of fields where this field should be copied to
     */
    public abstract AbstractFieldMapper.CopyTo copyTo();

    /**
     * Fields might not be available before indexing, for example _all, token_count,...
     * When get is called and these fields are requested, this case needs special treatment.
     *
     * @return If the field is available before indexing or not.
     * */
    public abstract boolean isGenerated();
    public CopyTo copyTo() {
        return copyTo;
    }

    /**
     * Parse using the provided {@link ParseContext} and return a mapping
     * update if dynamic mappings modified the mappings, or {@code null} if
     * mappings were not modified.
     */
    public abstract Mapper parse(ParseContext context) throws IOException;
    public Mapper parse(ParseContext context) throws IOException {
        final List<Field> fields = new ArrayList<>(2);
        try {
            parseCreateField(context, fields);
            for (Field field : fields) {
                if (!customBoost()) {
                    field.setBoost(fieldType().boost());
                }
                context.doc().add(field);
            }
        } catch (Exception e) {
            throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
        }
        multiFields.parse(this, context);
        return null;
    }

    /**
     * Parse the field value and populate <code>fields</code>.
     */
    protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;

    /**
     * Derived classes can override it to specify that boost value is set by derived classes.
     */
    protected boolean customBoost() {
        return false;
    }

    public Iterator<Mapper> iterator() {
        if (multiFields == null) {
            return Collections.emptyIterator();
        }
        return multiFields.iterator();
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        if (!this.getClass().equals(mergeWith.getClass())) {
            String mergedType = mergeWith.getClass().getSimpleName();
            if (mergeWith instanceof FieldMapper) {
                mergedType = ((FieldMapper) mergeWith).contentType();
            }
            mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
            // different types, return
            return;
        }
        FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
        List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
        fieldType().checkTypeName(fieldMergeWith.fieldType(), subConflicts);
        if (subConflicts.isEmpty() == false) {
            // return early if field types don't match
            assert subConflicts.size() == 1;
            mergeResult.addConflict(subConflicts.get(0));
            return;
        }

        boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false;
        fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
        for (String conflict : subConflicts) {
            mergeResult.addConflict(conflict);
        }
        multiFields.merge(mergeWith, mergeResult);

        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
            // apply changeable values
            MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
            fieldType.freeze();
            fieldTypeRef.set(fieldType);
            this.copyTo = fieldMergeWith.copyTo;
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(simpleName());
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        doXContentBody(builder, includeDefaults, params);
        return builder.endObject();
    }

    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {

        builder.field("type", contentType());
        if (indexCreatedBefore2x && (includeDefaults || !simpleName().equals(fieldType().names().originalIndexName()))) {
            builder.field("index_name", fieldType().names().originalIndexName());
        }

        if (includeDefaults || fieldType().boost() != 1.0f) {
            builder.field("boost", fieldType().boost());
        }

        boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
        boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
        if (includeDefaults || indexed != defaultIndexed ||
            fieldType().tokenized() != defaultFieldType.tokenized()) {
            builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
        }
        if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
            builder.field("store", fieldType().stored());
        }
        doXContentDocValues(builder, includeDefaults);
        if (includeDefaults || fieldType().storeTermVectors() != defaultFieldType.storeTermVectors()) {
            builder.field("term_vector", termVectorOptionsToString(fieldType()));
        }
        if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) {
            builder.startObject("norms");
            if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
                builder.field("enabled", !fieldType().omitNorms());
            }
            if (fieldType().normsLoading() != null) {
                builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading());
            }
            builder.endObject();
        }
        if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
            builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
        }

        doXContentAnalyzers(builder, includeDefaults);

        if (fieldType().similarity() != null) {
            builder.field("similarity", fieldType().similarity().name());
        } else if (includeDefaults) {
            builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
        }

        if (includeDefaults || hasCustomFieldDataSettings()) {
            builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap());
        }
        multiFields.toXContent(builder, params);

        if (copyTo != null) {
            copyTo.toXContent(builder, params);
        }
    }

    protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
        if (fieldType().indexAnalyzer() == null) {
            if (includeDefaults) {
                builder.field("analyzer", "default");
            }
        } else if (includeDefaults || fieldType().indexAnalyzer().name().startsWith("_") == false && fieldType().indexAnalyzer().name().equals("default") == false) {
            builder.field("analyzer", fieldType().indexAnalyzer().name());
            if (fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false) {
                builder.field("search_analyzer", fieldType().searchAnalyzer().name());
            }
        }
    }

    protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException {
        if (includeDefaults || defaultFieldType.hasDocValues() != fieldType().hasDocValues()) {
            builder.field("doc_values", fieldType().hasDocValues());
        }
    }

    protected static String indexOptionToString(IndexOptions indexOption) {
        switch (indexOption) {
            case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
                return TypeParsers.INDEX_OPTIONS_OFFSETS;
            case DOCS_AND_FREQS:
                return TypeParsers.INDEX_OPTIONS_FREQS;
            case DOCS_AND_FREQS_AND_POSITIONS:
                return TypeParsers.INDEX_OPTIONS_POSITIONS;
            case DOCS:
                return TypeParsers.INDEX_OPTIONS_DOCS;
            default:
                throw new IllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
        }
    }

    public static String termVectorOptionsToString(FieldType fieldType) {
        if (!fieldType.storeTermVectors()) {
            return "no";
        } else if (!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
            return "yes";
        } else if (fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
            return "with_offsets";
        } else {
            StringBuilder builder = new StringBuilder("with");
            if (fieldType.storeTermVectorPositions()) {
                builder.append("_positions");
            }
            if (fieldType.storeTermVectorOffsets()) {
                builder.append("_offsets");
            }
            if (fieldType.storeTermVectorPayloads()) {
                builder.append("_payloads");
            }
            return builder.toString();
        }
    }

    protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
        if (!indexed) {
            return "no";
        } else if (tokenized) {
            return "analyzed";
        } else {
            return "not_analyzed";
        }
    }

    protected boolean hasCustomFieldDataSettings() {
        return fieldType().fieldDataType() != null && fieldType().fieldDataType().equals(defaultFieldType.fieldDataType()) == false;
    }

    protected abstract String contentType();

    public static class MultiFields {

        public static MultiFields empty() {
            return new MultiFields(ContentPath.Type.FULL, ImmutableOpenMap.<String, FieldMapper>of());
        }

        public static class Builder {

            private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
            private ContentPath.Type pathType = ContentPath.Type.FULL;

            public Builder pathType(ContentPath.Type pathType) {
                this.pathType = pathType;
                return this;
            }

            public Builder add(Mapper.Builder builder) {
                mapperBuilders.put(builder.name(), builder);
                return this;
            }

            @SuppressWarnings("unchecked")
            public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) {
                if (pathType == ContentPath.Type.FULL && mapperBuilders.isEmpty()) {
                    return empty();
                } else if (mapperBuilders.isEmpty()) {
                    return new MultiFields(pathType, ImmutableOpenMap.<String, FieldMapper>of());
                } else {
                    ContentPath.Type origPathType = context.path().pathType();
                    context.path().pathType(pathType);
                    context.path().add(mainFieldBuilder.name());
                    ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
                    for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
                        String key = cursor.key;
                        Mapper.Builder value = cursor.value;
                        Mapper mapper = value.build(context);
                        assert mapper instanceof FieldMapper;
                        mapperBuilders.put(key, mapper);
                    }
                    context.path().remove();
                    context.path().pathType(origPathType);
                    ImmutableOpenMap.Builder<String, FieldMapper> mappers = mapperBuilders.cast();
                    return new MultiFields(pathType, mappers.build());
                }
            }
        }

        private final ContentPath.Type pathType;
        private volatile ImmutableOpenMap<String, FieldMapper> mappers;

        public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, FieldMapper> mappers) {
            this.pathType = pathType;
            this.mappers = mappers;
            // we disable the all in multi-field mappers
            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
                FieldMapper mapper = cursor.value;
                if (mapper instanceof AllFieldMapper.IncludeInAll) {
                    ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
                }
            }
        }

        public void parse(FieldMapper mainField, ParseContext context) throws IOException {
            // TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part of the mappings
            if (mappers.isEmpty()) {
                return;
            }

            context = context.createMultiFieldContext();

            ContentPath.Type origPathType = context.path().pathType();
            context.path().pathType(pathType);

            context.path().add(mainField.simpleName());
            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
                cursor.value.parse(context);
            }
            context.path().remove();
            context.path().pathType(origPathType);
        }

        // No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
        public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
            FieldMapper mergeWithMultiField = (FieldMapper) mergeWith;

            List<FieldMapper> newFieldMappers = null;
            ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;

            for (ObjectCursor<FieldMapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
                FieldMapper mergeWithMapper = cursor.value;
                Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
                if (mergeIntoMapper == null) {
                    // no mapping, simply add it if not simulating
                    if (!mergeResult.simulate()) {
                        // we disable the all in multi-field mappers
                        if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
                            ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
                        }
                        if (newMappersBuilder == null) {
                            newMappersBuilder = ImmutableOpenMap.builder(mappers);
                        }
                        newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper);
                        if (mergeWithMapper instanceof FieldMapper) {
                            if (newFieldMappers == null) {
                                newFieldMappers = new ArrayList<>(2);
                            }
                            newFieldMappers.add(mergeWithMapper);
                        }
                    }
                } else {
                    mergeIntoMapper.merge(mergeWithMapper, mergeResult);
                }
            }

            // first add all field mappers
            if (newFieldMappers != null) {
                mergeResult.addFieldMappers(newFieldMappers);
            }
            // now publish mappers
            if (newMappersBuilder != null) {
                mappers = newMappersBuilder.build();
            }
        }

        public Iterator<Mapper> iterator() {
            return Iterators.transform(mappers.values().iterator(), new Function<ObjectCursor<FieldMapper>, Mapper>() {
                @Override
                public Mapper apply(@Nullable ObjectCursor<FieldMapper> cursor) {
                    return cursor.value;
                }
            });
        }

        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (pathType != ContentPath.Type.FULL) {
                builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
            }
            if (!mappers.isEmpty()) {
                // sort the mappers so we get consistent serialization format
                Mapper[] sortedMappers = mappers.values().toArray(Mapper.class);
                Arrays.sort(sortedMappers, new Comparator<Mapper>() {
                    @Override
                    public int compare(Mapper o1, Mapper o2) {
                        return o1.name().compareTo(o2.name());
                    }
                });
                builder.startObject("fields");
                for (Mapper mapper : sortedMappers) {
                    mapper.toXContent(builder, params);
                }
                builder.endObject();
            }
            return builder;
        }
    }

    /**
     * Represents a list of fields with optional boost factor where the current field should be copied to
     */
    public static class CopyTo {

        private final ImmutableList<String> copyToFields;

        private CopyTo(ImmutableList<String> copyToFields) {
            this.copyToFields = copyToFields;
        }

        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (!copyToFields.isEmpty()) {
                builder.startArray("copy_to");
                for (String field : copyToFields) {
                    builder.value(field);
                }
                builder.endArray();
            }
            return builder;
        }

        public static class Builder {
            private final ImmutableList.Builder<String> copyToBuilders = ImmutableList.builder();

            public Builder add(String field) {
                copyToBuilders.add(field);
                return this;
            }

            public CopyTo build() {
                return new CopyTo(copyToBuilders.build());
            }
        }

        public List<String> copyToFields() {
            return copyToFields;
        }
    }

    /**
     * Fields might not be available before indexing, for example _all, token_count,...
     * When get is called and these fields are requested, this case needs special treatment.
     *
     * @return If the field is available before indexing or not.
     */
    public boolean isGenerated() {
        return false;
    }

}
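The new FieldMapper above absorbs what used to live in AbstractFieldMapper (the old class is deleted further down). Its nested Builder is designed for fluent use by concrete mappers; a hypothetical sketch of the call pattern follows — MyFieldMapper and the field name are made up for illustration, and `builder` is the self-typed reference each setter returns:

// Each setter mutates the Builder's cloned MappedFieldType and returns T,
// so concrete builders chain naturally.
MyFieldMapper.Builder b = new MyFieldMapper.Builder("title", fieldType);
b.store(true)                      // fieldType.setStored(true)
 .boost(2.0f)                      // fieldType.setBoost(2.0f)
 .storeTermVectorPositions(true)   // also turns storeTermVectors on
 .docValues(false);                // records docValuesSet so setupFieldType keeps the explicit value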
@@ -25,18 +25,12 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;

@@ -189,6 +183,7 @@ public abstract class MappedFieldType extends FieldType {
        setOmitNorms(false);
        setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        setBoost(1.0f);
        fieldDataType = new FieldDataType(typeName());
    }

    public abstract MappedFieldType clone();

@@ -461,8 +456,8 @@ public abstract class MappedFieldType extends FieldType {
            includeLower, includeUpper);
    }

    public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
        return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
    public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
        return new FuzzyQuery(createTerm(value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
    }

    public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
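The fuzzyQuery signature widens from String to Object, with BytesRefs.toString(value) normalizing the value before the edit distance is derived from it. The distance itself comes from Fuzziness.asDistance; a small demo of that mapping, assuming the elasticsearch artifact is on the classpath (the AUTO thresholds — 0 edits up to 2 characters, 1 up to 5, 2 beyond — are my reading of the Fuzziness class at this point in time):

import org.elasticsearch.common.unit.Fuzziness;

public class FuzzinessDistanceDemo {
    public static void main(String[] args) {
        // Fixed fuzziness maps straight to a Lucene max edit distance.
        System.out.println(Fuzziness.TWO.asDistance("kimchy"));   // 2
        // AUTO scales with term length.
        System.out.println(Fuzziness.AUTO.asDistance("ki"));      // 0
        System.out.println(Fuzziness.AUTO.asDistance("kimchy"));  // 2
    }
}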
@@ -139,7 +139,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
    }

    /** Returns the simple name, which identifies this mapper against other mappers at the same level in the mappers hierarchy
     * TODO: make this protected once Mapper, FieldMapper and AbstractFieldMapper are merged together */
     * TODO: make this protected once Mapper and FieldMapper are merged together */
    public final String simpleName() {
        return simpleName;
    }
@@ -19,9 +19,7 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;

import java.io.IOException;

@@ -30,16 +28,16 @@ import java.io.IOException;
/**
 * A mapper for a builtin field containing metadata about a document.
 */
public abstract class MetadataFieldMapper extends AbstractFieldMapper {
public abstract class MetadataFieldMapper extends FieldMapper {

    public abstract static class Builder<T extends Builder, Y extends MetadataFieldMapper> extends AbstractFieldMapper.Builder<T, Y> {
    public abstract static class Builder<T extends Builder, Y extends MetadataFieldMapper> extends FieldMapper.Builder<T, Y> {
        public Builder(String name, MappedFieldType fieldType) {
            super(name, fieldType);
        }
    }

    protected MetadataFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, MultiFields.empty(), null);
    protected MetadataFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, MultiFields.empty(), null);
    }

    /**
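MetadataFieldMapper now extends the consolidated FieldMapper and takes a defaultFieldType instead of the old docValues/fieldDataSettings pair. A hypothetical subclass sketch of the new constructor contract; DemoMetaFieldMapper is invented for illustration, only the abstract members visible in this excerpt are stubbed, and any further abstract methods of MetadataFieldMapper (not shown in these hunks) would also need overriding:

public class DemoMetaFieldMapper extends MetadataFieldMapper {

    protected DemoMetaFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings) {
        // simpleName, live field type, frozen default, index settings
        super("_demo", fieldType, defaultFieldType, indexSettings);
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        // a real metadata mapper would append its Lucene Field(s) here
    }

    @Override
    protected String contentType() {
        return "_demo";
    }
}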
@ -1,770 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.core;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import com.google.common.base.Function;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.Iterators;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldTypeReference;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.similarity.SimilarityLookupService;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.DOC_VALUES;
|
||||
|
||||
public abstract class AbstractFieldMapper extends FieldMapper {
|
||||
|
||||
public static class Defaults {
|
||||
public static final float BOOST = 1.0f;
|
||||
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
|
||||
}
|
||||
|
||||
public abstract static class Builder<T extends Builder, Y extends AbstractFieldMapper> extends Mapper.Builder<T, Y> {
|
||||
|
||||
protected final MappedFieldType fieldType;
|
||||
private final IndexOptions defaultOptions;
|
||||
protected Boolean docValues;
|
||||
protected boolean omitNormsSet = false;
|
||||
protected String indexName;
|
||||
protected Boolean includeInAll;
|
||||
protected boolean indexOptionsSet = false;
|
||||
@Nullable
|
||||
protected Settings fieldDataSettings;
|
||||
protected final MultiFields.Builder multiFieldsBuilder;
|
||||
protected CopyTo copyTo;
|
||||
|
||||
protected Builder(String name, MappedFieldType fieldType) {
|
||||
super(name);
|
||||
this.fieldType = fieldType.clone();
|
||||
this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable
|
||||
multiFieldsBuilder = new MultiFields.Builder();
|
||||
}
|
||||
|
||||
public T index(boolean index) {
|
||||
if (index) {
|
||||
if (fieldType.indexOptions() == IndexOptions.NONE) {
|
||||
/*
|
||||
* the logic here is to reset to the default options only if we are not indexed ie. options are null
|
||||
* if the fieldType has a non-null option we are all good it might have been set through a different
|
||||
* call.
|
||||
*/
|
||||
final IndexOptions options = getDefaultIndexOption();
|
||||
assert options != IndexOptions.NONE : "default IndexOptions is NONE can't enable indexing";
|
||||
fieldType.setIndexOptions(options);
|
||||
}
|
||||
} else {
|
||||
fieldType.setIndexOptions(IndexOptions.NONE);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected IndexOptions getDefaultIndexOption() {
|
||||
return defaultOptions;
|
||||
}
|
||||
|
||||
public T store(boolean store) {
|
||||
this.fieldType.setStored(store);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T docValues(boolean docValues) {
|
||||
this.docValues = docValues;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T storeTermVectors(boolean termVectors) {
|
||||
if (termVectors != this.fieldType.storeTermVectors()) {
|
||||
this.fieldType.setStoreTermVectors(termVectors);
|
||||
} // don't set it to false, it is default and might be flipped by a more specific option
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T storeTermVectorOffsets(boolean termVectorOffsets) {
|
||||
if (termVectorOffsets) {
|
||||
this.fieldType.setStoreTermVectors(termVectorOffsets);
|
||||
}
|
||||
this.fieldType.setStoreTermVectorOffsets(termVectorOffsets);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T storeTermVectorPositions(boolean termVectorPositions) {
|
||||
if (termVectorPositions) {
|
||||
this.fieldType.setStoreTermVectors(termVectorPositions);
|
||||
}
|
||||
this.fieldType.setStoreTermVectorPositions(termVectorPositions);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T storeTermVectorPayloads(boolean termVectorPayloads) {
|
||||
if (termVectorPayloads) {
|
||||
this.fieldType.setStoreTermVectors(termVectorPayloads);
|
||||
}
|
||||
this.fieldType.setStoreTermVectorPayloads(termVectorPayloads);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T tokenized(boolean tokenized) {
|
||||
this.fieldType.setTokenized(tokenized);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T boost(float boost) {
|
||||
this.fieldType.setBoost(boost);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T omitNorms(boolean omitNorms) {
|
||||
this.fieldType.setOmitNorms(omitNorms);
|
||||
this.omitNormsSet = true;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T indexOptions(IndexOptions indexOptions) {
|
||||
this.fieldType.setIndexOptions(indexOptions);
|
||||
this.indexOptionsSet = true;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T indexName(String indexName) {
|
||||
this.indexName = indexName;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T indexAnalyzer(NamedAnalyzer indexAnalyzer) {
|
||||
this.fieldType.setIndexAnalyzer(indexAnalyzer);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T searchAnalyzer(NamedAnalyzer searchAnalyzer) {
|
||||
this.fieldType.setSearchAnalyzer(searchAnalyzer);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T includeInAll(Boolean includeInAll) {
|
||||
this.includeInAll = includeInAll;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T similarity(SimilarityProvider similarity) {
|
||||
this.fieldType.setSimilarity(similarity);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T normsLoading(MappedFieldType.Loading normsLoading) {
|
||||
this.fieldType.setNormsLoading(normsLoading);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T fieldDataSettings(Settings settings) {
|
||||
this.fieldDataSettings = settings;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public Builder nullValue(Object nullValue) {
|
||||
this.fieldType.setNullValue(nullValue);
|
||||
return this;
|
||||
}
|
||||
|
||||
public T multiFieldPathType(ContentPath.Type pathType) {
|
||||
multiFieldsBuilder.pathType(pathType);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T addMultiField(Mapper.Builder mapperBuilder) {
|
||||
multiFieldsBuilder.add(mapperBuilder);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T copyTo(CopyTo copyTo) {
|
||||
this.copyTo = copyTo;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected MappedFieldType.Names buildNames(BuilderContext context) {
|
||||
return new MappedFieldType.Names(buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
|
||||
}
|
||||
|
||||
protected String buildIndexName(BuilderContext context) {
|
||||
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
|
||||
return buildFullName(context);
|
||||
}
|
||||
String actualIndexName = indexName == null ? name : indexName;
|
||||
return context.path().pathAsText(actualIndexName);
|
||||
}
|
||||
|
||||
protected String buildIndexNameClean(BuilderContext context) {
|
||||
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
|
||||
return buildFullName(context);
|
||||
}
|
||||
return indexName == null ? name : indexName;
|
||||
}
|
||||
|
||||
protected String buildFullName(BuilderContext context) {
|
||||
return context.path().fullPathAsText(name);
|
||||
}
|
||||
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
fieldType.setNames(buildNames(context));
|
||||
}
|
||||
}
|
||||
|
||||
protected MappedFieldTypeReference fieldTypeRef;
|
||||
protected final boolean hasDefaultDocValues;
|
||||
protected Settings customFieldDataSettings;
|
||||
protected final MultiFields multiFields;
|
||||
protected CopyTo copyTo;
|
||||
protected final boolean indexCreatedBefore2x;
|
||||
|
||||
protected AbstractFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
this(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, MultiFields.empty(), null);
|
||||
}
|
||||
|
||||
protected AbstractFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName);
|
||||
assert indexSettings != null;
|
||||
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
|
||||
this.customFieldDataSettings = fieldDataSettings;
|
||||
FieldDataType fieldDataType;
|
||||
if (fieldDataSettings == null) {
|
||||
fieldDataType = defaultFieldDataType();
|
||||
} else {
|
||||
// create a new field data type, with the default settings as well as the "new ones"
|
||||
fieldDataType = new FieldDataType(defaultFieldDataType().getType(),
|
||||
Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings)
|
||||
);
|
||||
}
|
||||
|
||||
// TODO: hasDocValues should just be set directly on the field type by callers of this ctor, but
|
||||
// then we need to eliminate defaultDocValues() (only needed by geo, which needs to be fixed with passing
|
||||
// doc values setting down to lat/lon) and get rid of specifying doc values in fielddata (which
|
||||
// complicates whether we can just compare to the default value to know whether to write the setting)
|
||||
if (docValues == null && fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) {
|
||||
docValues = true;
|
||||
}
|
||||
hasDefaultDocValues = docValues == null;
|
||||
|
||||
this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // must init first so defaultDocValues() can be called
|
||||
fieldType = fieldType.clone();
|
||||
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
|
||||
fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
}
|
||||
fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues);
|
||||
fieldType.setFieldDataType(fieldDataType);
|
||||
fieldType.freeze();
|
||||
this.fieldTypeRef.set(fieldType); // now reset ref once extra settings have been initialized
|
||||
|
||||
this.multiFields = multiFields;
|
||||
this.copyTo = copyTo;
|
||||
}
|
||||
|
||||
protected boolean defaultDocValues() {
|
||||
if (indexCreatedBefore2x) {
|
||||
return false;
|
||||
} else {
|
||||
return fieldType().tokenized() == false && fieldType().indexOptions() != IndexOptions.NONE;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return fieldType().names().fullName();
|
||||
}
|
||||
|
||||
public abstract MappedFieldType defaultFieldType();
|
||||
|
||||
public abstract FieldDataType defaultFieldDataType();
|
||||
|
||||
@Override
|
||||
public MappedFieldType fieldType() {
|
||||
return fieldTypeRef.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappedFieldTypeReference fieldTypeReference() {
|
||||
return fieldTypeRef;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFieldTypeReference(MappedFieldTypeReference ref) {
|
||||
if (ref.get().equals(fieldType()) == false) {
|
||||
throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
|
||||
}
|
||||
ref.incrementAssociatedMappers();
|
||||
this.fieldTypeRef = ref;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CopyTo copyTo() {
|
||||
return copyTo;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mapper parse(ParseContext context) throws IOException {
|
||||
final List<Field> fields = new ArrayList<>(2);
|
||||
try {
|
||||
parseCreateField(context, fields);
|
||||
for (Field field : fields) {
|
||||
if (!customBoost()) {
|
||||
field.setBoost(fieldType().boost());
|
||||
}
|
||||
context.doc().add(field);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
|
||||
}
|
||||
multiFields.parse(this, context);
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the field value and populate <code>fields</code>.
|
||||
*/
|
||||
protected abstract void parseCreateField(ParseContext context, List<Field> fields) throws IOException;
|
||||
|
||||
/**
|
||||
* Derived classes can override it to specify that boost value is set by derived classes.
|
||||
*/
|
||||
protected boolean customBoost() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public Iterator<Mapper> iterator() {
|
||||
if (multiFields == null) {
|
||||
return Collections.emptyIterator();
|
||||
}
|
||||
return multiFields.iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
String mergedType = mergeWith.getClass().getSimpleName();
|
||||
if (mergeWith instanceof AbstractFieldMapper) {
|
||||
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
|
||||
}
|
||||
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
|
||||
// different types, return
|
||||
return;
|
||||
}
|
||||
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
|
||||
List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
|
||||
fieldType().checkTypeName(fieldMergeWith.fieldType(), subConflicts);
|
||||
if (subConflicts.isEmpty() == false) {
|
||||
// return early if field types don't match
|
||||
assert subConflicts.size() == 1;
|
||||
mergeResult.addConflict(subConflicts.get(0));
|
||||
return;
|
||||
}
|
||||
|
||||
boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false;
|
||||
fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
|
||||
for (String conflict : subConflicts) {
|
||||
mergeResult.addConflict(conflict);
|
||||
}
|
||||
multiFields.merge(mergeWith, mergeResult);
|
||||
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
// apply changeable values
|
||||
MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
|
||||
fieldType.freeze();
|
||||
fieldTypeRef.set(fieldType);
|
||||
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
|
||||
this.copyTo = fieldMergeWith.copyTo;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(simpleName());
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
doXContentBody(builder, includeDefaults, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
|
||||
builder.field("type", contentType());
|
||||
if (indexCreatedBefore2x && (includeDefaults || !simpleName().equals(fieldType().names().originalIndexName()))) {
|
||||
builder.field("index_name", fieldType().names().originalIndexName());
|
||||
}
|
||||
|
||||
if (includeDefaults || fieldType().boost() != 1.0f) {
|
||||
builder.field("boost", fieldType().boost());
|
||||
}
|
||||
|
||||
FieldType defaultFieldType = defaultFieldType();
|
||||
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
|
||||
boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
|
||||
if (includeDefaults || indexed != defaultIndexed ||
|
||||
fieldType().tokenized() != defaultFieldType.tokenized()) {
|
||||
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
|
||||
}
|
||||
if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
doXContentDocValues(builder, includeDefaults);
|
||||
if (includeDefaults || fieldType().storeTermVectors() != defaultFieldType.storeTermVectors()) {
|
||||
builder.field("term_vector", termVectorOptionsToString(fieldType()));
|
||||
}
|
||||
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) {
|
||||
builder.startObject("norms");
|
||||
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
|
||||
builder.field("enabled", !fieldType().omitNorms());
|
||||
}
|
||||
if (fieldType().normsLoading() != null) {
|
||||
builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
|
||||
builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
|
||||
}
|
||||
|
||||
doXContentAnalyzers(builder, includeDefaults);
|
||||
|
||||
if (fieldType().similarity() != null) {
|
||||
builder.field("similarity", fieldType().similarity().name());
|
||||
} else if (includeDefaults) {
|
||||
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
|
||||
}
|
||||
|
||||
TreeMap<String, Object> orderedFielddataSettings = new TreeMap<>();
|
||||
if (hasCustomFieldDataSettings()) {
|
||||
orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
|
||||
builder.field("fielddata", orderedFielddataSettings);
|
||||
} else if (includeDefaults) {
|
||||
orderedFielddataSettings.putAll(fieldType().fieldDataType().getSettings().getAsMap());
|
||||
builder.field("fielddata", orderedFielddataSettings);
|
||||
}
|
||||
multiFields.toXContent(builder, params);
|
||||
|
||||
if (copyTo != null) {
|
||||
copyTo.toXContent(builder, params);
|
||||
}
|
||||
}
|
||||
|
||||
protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
|
||||
if (fieldType().indexAnalyzer() == null) {
|
||||
if (includeDefaults) {
|
||||
builder.field("analyzer", "default");
|
||||
}
|
||||
} else if (includeDefaults || fieldType().indexAnalyzer().name().startsWith("_") == false && fieldType().indexAnalyzer().name().equals("default") == false) {
|
||||
builder.field("analyzer", fieldType().indexAnalyzer().name());
|
||||
if (fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false) {
|
||||
builder.field("search_analyzer", fieldType().searchAnalyzer().name());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException {
|
||||
if (includeDefaults || hasDefaultDocValues == false) {
|
||||
builder.field(DOC_VALUES, fieldType().hasDocValues());
|
||||
}
|
||||
}
|
||||
|
||||
protected static String indexOptionToString(IndexOptions indexOption) {
|
||||
switch (indexOption) {
|
||||
case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
|
||||
return TypeParsers.INDEX_OPTIONS_OFFSETS;
|
||||
case DOCS_AND_FREQS:
|
||||
return TypeParsers.INDEX_OPTIONS_FREQS;
|
||||
case DOCS_AND_FREQS_AND_POSITIONS:
|
||||
return TypeParsers.INDEX_OPTIONS_POSITIONS;
|
||||
case DOCS:
|
||||
return TypeParsers.INDEX_OPTIONS_DOCS;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown IndexOptions [" + indexOption + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public static String termVectorOptionsToString(FieldType fieldType) {
|
||||
if (!fieldType.storeTermVectors()) {
|
||||
return "no";
|
||||
} else if (!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
|
||||
return "yes";
|
||||
} else if (fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
|
||||
return "with_offsets";
|
||||
} else {
|
||||
StringBuilder builder = new StringBuilder("with");
|
||||
if (fieldType.storeTermVectorPositions()) {
|
||||
builder.append("_positions");
|
||||
}
|
||||
if (fieldType.storeTermVectorOffsets()) {
|
||||
builder.append("_offsets");
|
||||
}
|
||||
if (fieldType.storeTermVectorPayloads()) {
|
||||
builder.append("_payloads");
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
}

    protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
        if (!indexed) {
            return "no";
        } else if (tokenized) {
            return "analyzed";
        } else {
            return "not_analyzed";
        }
    }

    protected boolean hasCustomFieldDataSettings() {
        return customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;
    }

    protected abstract String contentType();

    public static class MultiFields {

        public static MultiFields empty() {
            return new MultiFields(Defaults.PATH_TYPE, ImmutableOpenMap.<String, FieldMapper>of());
        }

        public static class Builder {

            private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
            private ContentPath.Type pathType = Defaults.PATH_TYPE;

            public Builder pathType(ContentPath.Type pathType) {
                this.pathType = pathType;
                return this;
            }

            public Builder add(Mapper.Builder builder) {
                mapperBuilders.put(builder.name(), builder);
                return this;
            }

            @SuppressWarnings("unchecked")
            public MultiFields build(AbstractFieldMapper.Builder mainFieldBuilder, BuilderContext context) {
                if (pathType == Defaults.PATH_TYPE && mapperBuilders.isEmpty()) {
                    return empty();
                } else if (mapperBuilders.isEmpty()) {
                    return new MultiFields(pathType, ImmutableOpenMap.<String, FieldMapper>of());
                } else {
                    ContentPath.Type origPathType = context.path().pathType();
                    context.path().pathType(pathType);
                    context.path().add(mainFieldBuilder.name());
                    ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
                    for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {
                        String key = cursor.key;
                        Mapper.Builder value = cursor.value;
                        Mapper mapper = value.build(context);
                        assert mapper instanceof FieldMapper;
                        mapperBuilders.put(key, mapper);
                    }
                    context.path().remove();
                    context.path().pathType(origPathType);
                    ImmutableOpenMap.Builder<String, FieldMapper> mappers = mapperBuilders.cast();
                    return new MultiFields(pathType, mappers.build());
                }
            }
        }

        private final ContentPath.Type pathType;
        private volatile ImmutableOpenMap<String, FieldMapper> mappers;

        public MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, FieldMapper> mappers) {
            this.pathType = pathType;
            this.mappers = mappers;
            // we disable the all in multi-field mappers
            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
                FieldMapper mapper = cursor.value;
                if (mapper instanceof AllFieldMapper.IncludeInAll) {
                    ((AllFieldMapper.IncludeInAll) mapper).unsetIncludeInAll();
                }
            }
        }

        public void parse(AbstractFieldMapper mainField, ParseContext context) throws IOException {
            // TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part of the mappings
            if (mappers.isEmpty()) {
                return;
            }

            context = context.createMultiFieldContext();

            ContentPath.Type origPathType = context.path().pathType();
            context.path().pathType(pathType);

            context.path().add(mainField.simpleName());
            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
                cursor.value.parse(context);
            }
            context.path().remove();
            context.path().pathType(origPathType);
        }

        // No need for locking, because locking is taken care of in ObjectMapper#merge and DocumentMapper#merge
        public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
            AbstractFieldMapper mergeWithMultiField = (AbstractFieldMapper) mergeWith;

            List<FieldMapper> newFieldMappers = null;
            ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;

            for (ObjectCursor<FieldMapper> cursor : mergeWithMultiField.multiFields.mappers.values()) {
                FieldMapper mergeWithMapper = cursor.value;
                Mapper mergeIntoMapper = mappers.get(mergeWithMapper.simpleName());
                if (mergeIntoMapper == null) {
                    // no mapping, simply add it if not simulating
                    if (!mergeResult.simulate()) {
                        // we disable the all in multi-field mappers
                        if (mergeWithMapper instanceof AllFieldMapper.IncludeInAll) {
                            ((AllFieldMapper.IncludeInAll) mergeWithMapper).unsetIncludeInAll();
                        }
                        if (newMappersBuilder == null) {
                            newMappersBuilder = ImmutableOpenMap.builder(mappers);
                        }
                        newMappersBuilder.put(mergeWithMapper.simpleName(), mergeWithMapper);
                        if (mergeWithMapper instanceof AbstractFieldMapper) {
                            if (newFieldMappers == null) {
                                newFieldMappers = new ArrayList<>(2);
                            }
                            newFieldMappers.add(mergeWithMapper);
                        }
                    }
                } else {
                    mergeIntoMapper.merge(mergeWithMapper, mergeResult);
                }
            }

            // first add all field mappers
            if (newFieldMappers != null) {
                mergeResult.addFieldMappers(newFieldMappers);
            }
            // now publish mappers
            if (newMappersBuilder != null) {
                mappers = newMappersBuilder.build();
            }
        }

        public Iterator<Mapper> iterator() {
            return Iterators.transform(mappers.values().iterator(), new Function<ObjectCursor<FieldMapper>, Mapper>() {
                @Override
                public Mapper apply(@Nullable ObjectCursor<FieldMapper> cursor) {
                    return cursor.value;
                }
            });
        }

        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (pathType != Defaults.PATH_TYPE) {
                builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
            }
            if (!mappers.isEmpty()) {
                // sort the mappers so we get consistent serialization format
                Mapper[] sortedMappers = mappers.values().toArray(Mapper.class);
                Arrays.sort(sortedMappers, new Comparator<Mapper>() {
                    @Override
                    public int compare(Mapper o1, Mapper o2) {
                        return o1.name().compareTo(o2.name());
                    }
                });
                builder.startObject("fields");
                for (Mapper mapper : sortedMappers) {
                    mapper.toXContent(builder, params);
                }
                builder.endObject();
            }
            return builder;
        }
    }

    /**
     * Represents a list of fields with optional boost factor where the current field should be copied to
     */
    public static class CopyTo {

        private final ImmutableList<String> copyToFields;

        private CopyTo(ImmutableList<String> copyToFields) {
            this.copyToFields = copyToFields;
        }

        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (!copyToFields.isEmpty()) {
                builder.startArray("copy_to");
                for (String field : copyToFields) {
                    builder.value(field);
                }
                builder.endArray();
            }
            return builder;
        }

        public static class Builder {
            private final ImmutableList.Builder<String> copyToBuilders = ImmutableList.builder();

            public Builder add(String field) {
                copyToBuilders.add(field);
                return this;
            }

            public CopyTo build() {
                return new CopyTo(copyToBuilders.build());
            }
        }

        public List<String> copyToFields() {
            return copyToFields;
        }
    }

    /**
     * Returns if this field is only generated when indexing. For example, the field of type token_count
     */
    @Override
    public boolean isGenerated() {
        return false;
    }
}
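The CopyTo builder above is small enough to exercise directly. A minimal usage sketch, assuming the nested classes stay where they are shown; the target field name "meta.description" is invented for illustration:

import org.elasticsearch.index.mapper.core.AbstractFieldMapper.CopyTo;

// Hypothetical demo, not part of this commit.
public class CopyToDemo {
    public static void main(String[] args) {
        CopyTo copyTo = new CopyTo.Builder()
                .add("meta.description") // invented target field
                .build();
        // toXContent(...) would serialize this as: "copy_to": ["meta.description"]
        System.out.println(copyTo.copyToFields()); // prints [meta.description]
    }
}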
@ -28,7 +28,6 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

@ -36,7 +35,7 @@ import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;

@ -54,14 +53,14 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
 *
 */
public class BinaryFieldMapper extends AbstractFieldMapper {
public class BinaryFieldMapper extends FieldMapper {

    public static final String CONTENT_TYPE = "binary";
    private static final ParseField COMPRESS = new ParseField("compress").withAllDeprecated("no replacement, implemented at the codec level");
    private static final ParseField COMPRESS_THRESHOLD = new ParseField("compress_threshold").withAllDeprecated("no replacement");


    public static class Defaults extends AbstractFieldMapper.Defaults {
    public static class Defaults {
        public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();

        static {

@ -70,7 +69,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
        }
    }

    public static class Builder extends AbstractFieldMapper.Builder<Builder, BinaryFieldMapper> {
    public static class Builder extends FieldMapper.Builder<Builder, BinaryFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);

@ -81,8 +80,8 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
        public BinaryFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            ((BinaryFieldType)fieldType).setTryUncompressing(context.indexCreatedVersion().before(Version.V_2_0_0));
            return new BinaryFieldMapper(name, fieldType, docValues,
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            return new BinaryFieldMapper(name, fieldType, defaultFieldType,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
    }

@ -181,19 +180,9 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
        }
    }

    protected BinaryFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
                                @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("binary");
    protected BinaryFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
    }

    @Override
@ -24,18 +24,15 @@ import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;

@ -50,11 +47,11 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
 * A field mapper for boolean fields.
 */
public class BooleanFieldMapper extends AbstractFieldMapper {
public class BooleanFieldMapper extends FieldMapper {

    public static final String CONTENT_TYPE = "boolean";

    public static class Defaults extends AbstractFieldMapper.Defaults {
    public static class Defaults {
        public static final MappedFieldType FIELD_TYPE = new BooleanFieldType();

        static {

@ -72,7 +69,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
        public final static BytesRef FALSE = new BytesRef("F");
    }

    public static class Builder extends AbstractFieldMapper.Builder<Builder, BooleanFieldMapper> {
    public static class Builder extends FieldMapper.Builder<Builder, BooleanFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);

@ -90,8 +87,8 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
        @Override
        public BooleanFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            return new BooleanFieldMapper(name, fieldType, docValues,
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            return new BooleanFieldMapper(name, fieldType, defaultFieldType,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
    }

@ -194,9 +191,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
        }
    }

    protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
                                 @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
    protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
    }

    @Override

@ -204,17 +201,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
        return (BooleanFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        // TODO have a special boolean type?
        return new FieldDataType(CONTENT_TYPE);
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
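The Values constants above show that boolean fields are indexed as the single-character terms "T" and "F". A hedged sketch of the normalization this implies, written outside the mapper; the class and helper names are invented:

import org.apache.lucene.util.BytesRef;

// Hypothetical sketch, not part of this commit.
public class BooleanTermDemo {
    static final BytesRef TRUE = new BytesRef("T");
    static final BytesRef FALSE = new BytesRef("F");

    // Reduce a boolean-ish value to the term it would be looked up as.
    static BytesRef indexedValueForSearch(Object value) {
        if (value instanceof Boolean) {
            return ((Boolean) value) ? TRUE : FALSE;
        }
        String text = value.toString();
        return ("true".equals(text) || "T".equals(text)) ? TRUE : FALSE;
    }

    public static void main(String[] args) {
        System.out.println(indexedValueForSearch(true).utf8ToString());    // T
        System.out.println(indexedValueForSearch("false").utf8ToString()); // F
    }
}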
@ -38,12 +38,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -81,8 +78,8 @@ public class ByteFieldMapper extends NumberFieldMapper {
        @Override
        public ByteFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, docValues, ignoreMalformed(context),
                    coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
                    coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

@ -174,8 +171,8 @@ public class ByteFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            byte iValue = Byte.parseByte(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            byte iValue = parseValue(value);
            byte iSim = fuzziness.asByte();
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -193,10 +190,10 @@ public class ByteFieldMapper extends NumberFieldMapper {
        }
    }

    protected ByteFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected ByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                              Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                              @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -204,16 +201,6 @@ public class ByteFieldMapper extends NumberFieldMapper {
        return (ByteFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("byte");
    }

    private static byte parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).byteValue();
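Note how fuzzyQuery above treats numeric fuzziness: the value plus/minus the fuzziness becomes an inclusive NumericRangeQuery. A self-contained sketch of that pattern; the field name and precision step are invented example values:

import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;

// Hypothetical sketch, not part of this commit.
public class NumericFuzzDemo {
    // Mirrors the fuzzyQuery pattern above: [value - fuzz, value + fuzz], both ends inclusive.
    static Query intFuzz(String field, int precisionStep, int value, int fuzz) {
        return NumericRangeQuery.newIntRange(field, precisionStep,
                value - fuzz, value + fuzz, true, true);
    }

    public static void main(String[] args) {
        System.out.println(intFuzz("age", 8, 42, 2)); // age:[40 TO 44]
    }
}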
@ -37,7 +37,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.NumberType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperException;

@ -66,11 +66,11 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
/**
 *
 */
public class CompletionFieldMapper extends AbstractFieldMapper {
public class CompletionFieldMapper extends FieldMapper {

    public static final String CONTENT_TYPE = "completion";

    public static class Defaults extends AbstractFieldMapper.Defaults {
    public static class Defaults {
        public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType();

        static {

@ -104,7 +104,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
    public static final Set<String> ALLOWED_CONTENT_FIELD_NAMES = Sets.newHashSet(Fields.CONTENT_FIELD_NAME_INPUT,
            Fields.CONTENT_FIELD_NAME_OUTPUT, Fields.CONTENT_FIELD_NAME_PAYLOAD, Fields.CONTENT_FIELD_NAME_WEIGHT, Fields.CONTEXT);

    public static class Builder extends AbstractFieldMapper.Builder<Builder, CompletionFieldMapper> {
    public static class Builder extends FieldMapper.Builder<Builder, CompletionFieldMapper> {

        private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS;
        private boolean payloads = Defaults.DEFAULT_HAS_PAYLOADS;

@ -226,7 +226,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
        private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
        private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;

        public CompletionFieldType() {}
        public CompletionFieldType() {
            setFieldDataType(null);
        }

        protected CompletionFieldType(CompletionFieldType ref) {
            super(ref);

@ -312,7 +314,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
    private int maxInputLength;

    public CompletionFieldMapper(String simpleName, MappedFieldType fieldType, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, false, null, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
        this.maxInputLength = maxInputLength;
    }

@ -538,16 +540,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
        return CONTENT_TYPE;
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return null;
    }

    public boolean isStoringPayloads() {
        return fieldType().analyzingSuggestLookupProvider.hasPayloads();
    }
@ -69,7 +69,8 @@ public class DateFieldMapper extends NumberFieldMapper {
    public static final String CONTENT_TYPE = "date";

    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime||epoch_millis", Locale.ROOT);
        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime||epoch_millis", Locale.ROOT);
        public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("dateOptionalTime", Locale.ROOT);
        public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
        public static final DateFieldType FIELD_TYPE = new DateFieldType();

@ -93,7 +94,8 @@
            locale = Locale.ROOT;
        }

        DateFieldType fieldType() {
        @Override
        public DateFieldType fieldType() {
            return (DateFieldType)fieldType;
        }

@ -116,23 +118,20 @@
        public DateFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            fieldType.setNullValue(nullValue);
            DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType,
                    docValues, ignoreMalformed(context), coerce(context),
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
                    coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

        protected void setupFieldType(BuilderContext context) {
            FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
            // TODO MOVE ME OUTSIDE OF THIS SPACE?
            if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0)) {
                boolean includesEpochFormatter = dateTimeFormatter.format().contains("epoch_");
                if (!includesEpochFormatter) {
                    String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
                    fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + dateTimeFormatter.format()));
                }
            if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0) &&
                    !fieldType().dateTimeFormatter().format().contains("epoch_")) {
                String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
                fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + fieldType().dateTimeFormatter().format()));
            }

            FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
            if (!locale.equals(dateTimeFormatter.locale())) {
                fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
            }

@ -160,6 +159,7 @@
        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            DateFieldMapper.Builder builder = dateField(name);
            parseNumberField(builder, name, node, parserContext);
            boolean configuredFormat = false;
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());

@ -172,6 +172,7 @@
                    iterator.remove();
                } else if (propName.equals("format")) {
                    builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
                    configuredFormat = true;
                    iterator.remove();
                } else if (propName.equals("numeric_resolution")) {
                    builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT)));

@ -181,6 +182,13 @@
                    iterator.remove();
                }
            }
            if (!configuredFormat) {
                if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
                    builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
                } else {
                    builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER_BEFORE_2_0);
                }
            }
            return builder;
        }
    }

@ -259,6 +267,7 @@

        public DateFieldType() {
            super(NumericType.LONG);
            setFieldDataType(new FieldDataType("long"));
        }

        protected DateFieldType(DateFieldType ref) {

@ -386,8 +395,8 @@
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            long iValue = dateMathParser().parse(value, now());
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            long iValue = parseValue(value);
            long iSim;
            try {
                iSim = fuzziness.asTimeValue().millis();

@ -436,9 +445,9 @@
        }
    }

    protected DateFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                              @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
    protected DateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -446,16 +455,6 @@
        return (DateFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("long");
    }

    private static Callable<Long> now() {
        return new Callable<Long>() {
            @Override
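The date hunks above change the 2.0+ default format to strictDateOptionalTime||epoch_millis, while indices created before 2.0 keep the lenient dateOptionalTime; when a mapping sets no explicit format, the TypeParser picks one by index creation version. A hedged sketch of that version check in isolation; the demo class is invented:

import java.util.Locale;
import org.elasticsearch.Version;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;

// Hypothetical sketch, not part of this commit.
public class DateFormatDefaultDemo {
    static FormatDateTimeFormatter defaultFormatter(Version indexVersionCreated) {
        return indexVersionCreated.onOrAfter(Version.V_2_0_0)
                ? Joda.forPattern("strictDateOptionalTime||epoch_millis", Locale.ROOT)
                : Joda.forPattern("dateOptionalTime", Locale.ROOT);
    }

    public static void main(String[] args) {
        System.out.println(defaultFormatter(Version.CURRENT).format());
    }
}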
@ -20,11 +20,9 @@
package org.elasticsearch.index.mapper.core;

import com.carrotsearch.hppc.DoubleArrayList;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;

@ -44,12 +42,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -88,8 +83,8 @@ public class DoubleFieldMapper extends NumberFieldMapper {
        @Override
        public DoubleFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, docValues, ignoreMalformed(context), coerce(context),
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

@ -182,8 +177,8 @@ public class DoubleFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            double iValue = Double.parseDouble(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            double iValue = parseDoubleValue(value);
            double iSim = fuzziness.asDouble();
            return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -201,9 +196,9 @@ public class DoubleFieldMapper extends NumberFieldMapper {
        }
    }

    protected DoubleFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                                @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
    protected DoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed,
                                Explicit<Boolean> coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -211,16 +206,6 @@ public class DoubleFieldMapper extends NumberFieldMapper {
        return (DoubleFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("double");
    }

    @Override
    protected boolean customBoost() {
        return true;
@ -20,11 +20,9 @@
package org.elasticsearch.index.mapper.core;

import com.carrotsearch.hppc.FloatArrayList;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;

@ -45,12 +43,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -89,8 +84,8 @@ public class FloatFieldMapper extends NumberFieldMapper {
        @Override
        public FloatFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, docValues, ignoreMalformed(context), coerce(context),
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

@ -183,8 +178,8 @@ public class FloatFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            float iValue = Float.parseFloat(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            float iValue = parseValue(value);
            final float iSim = fuzziness.asFloat();
            return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -202,10 +197,10 @@ public class FloatFieldMapper extends NumberFieldMapper {
        }
    }

    protected FloatFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected FloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                               Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                               @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
                               Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -213,16 +208,6 @@ public class FloatFieldMapper extends NumberFieldMapper {
        return (FloatFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("float");
    }

    private static float parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).floatValue();
@ -40,13 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -89,8 +85,8 @@ public class IntegerFieldMapper extends NumberFieldMapper {
        @Override
        public IntegerFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, docValues,
                    ignoreMalformed(context), coerce(context), fieldDataSettings,
            IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, defaultFieldType,
                    ignoreMalformed(context), coerce(context),
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;

@ -145,7 +141,8 @@ public class IntegerFieldMapper extends NumberFieldMapper {

        @Override
        public String typeName() {
            return CONTENT_TYPE;
            // TODO: this should be the same as the mapper type name, except fielddata expects int...
            return "int";
        }

        @Override

@ -183,8 +180,8 @@ public class IntegerFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            int iValue = Integer.parseInt(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            int iValue = parseValue(value);
            int iSim = fuzziness.asInt();
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -202,11 +199,10 @@ public class IntegerFieldMapper extends NumberFieldMapper {
        }
    }

    protected IntegerFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected IntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                 Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                                 @Nullable Settings fieldDataSettings,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -214,16 +210,6 @@ public class IntegerFieldMapper extends NumberFieldMapper {
        return (IntegerFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("int");
    }

    private static int parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).intValue();
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;

@ -41,12 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -89,8 +85,8 @@ public class LongFieldMapper extends NumberFieldMapper {
        @Override
        public LongFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, docValues,
                    ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, defaultFieldType,
                    ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

@ -182,8 +178,8 @@ public class LongFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            long iValue = Long.parseLong(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            long iValue = parseLongValue(value);
            final long iSim = fuzziness.asLong();
            return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -201,11 +197,10 @@ public class LongFieldMapper extends NumberFieldMapper {
        }
    }

    protected LongFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected LongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                              Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                              @Nullable Settings fieldDataSettings,
                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -213,16 +208,6 @@ public class LongFieldMapper extends NumberFieldMapper {
        return (LongFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("long");
    }

    @Override
    protected boolean customBoost() {
        return true;
@ -23,17 +23,14 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.List;

@ -61,9 +58,9 @@ public class Murmur3FieldMapper extends LongFieldMapper {
        @Override
        public Murmur3FieldMapper build(BuilderContext context) {
            setupFieldType(context);
            Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, docValues,
            Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, defaultFieldType,
                    ignoreMalformed(context), coerce(context),
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

@ -119,12 +116,10 @@ public class Murmur3FieldMapper extends LongFieldMapper {
        }
    }

    protected Murmur3FieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected Murmur3FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                 Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                                 @Nullable Settings fieldDataSettings,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce,
                fieldDataSettings, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override
@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.core;

import com.carrotsearch.hppc.LongArrayList;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.TokenStream;

@ -36,19 +35,13 @@ import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;

import java.io.IOException;

@ -58,9 +51,9 @@ import java.util.List;
/**
 *
 */
public abstract class NumberFieldMapper extends AbstractFieldMapper implements AllFieldMapper.IncludeInAll {
public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

    public static class Defaults extends AbstractFieldMapper.Defaults {
    public static class Defaults {

        public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful
        public static final int PRECISION_STEP_16_BIT = 8; // 2tpv

@ -71,7 +64,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
        public static final Explicit<Boolean> COERCE = new Explicit<>(true, false);
    }

    public abstract static class Builder<T extends Builder, Y extends NumberFieldMapper> extends AbstractFieldMapper.Builder<T, Y> {
    public abstract static class Builder<T extends Builder, Y extends NumberFieldMapper> extends FieldMapper.Builder<T, Y> {

        private Boolean ignoreMalformed;

@ -158,7 +151,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
        }

        @Override
        public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
        public abstract Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);

        @Override
        public boolean useTermQueryWithQueryString() {

@ -185,11 +178,10 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
     */
    protected final boolean useSortedNumericDocValues;

    protected NumberFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
                                Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, @Nullable Settings fieldDataSettings, Settings indexSettings,
    protected NumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, Settings indexSettings,
                                MultiFields multiFields, CopyTo copyTo) {
        // LUCENE 4 UPGRADE: Since we can't do anything before the super call, we have to push the boost check down to subclasses
        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        this.ignoreMalformed = ignoreMalformed;
        this.coerce = coerce;
        this.useSortedNumericDocValues = Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1);
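Widening the abstract fuzzyQuery contract above from String to Object lets each numeric mapper accept ready-made numbers as well as their string spellings, funneled through a per-type parse helper. A minimal sketch of that helper pattern; the class and method here are invented stand-ins for the subclasses' parseValue helpers:

// Hypothetical sketch, not part of this commit.
public class ParseValueDemo {
    static long parseLongValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).longValue(); // already numeric: no re-parsing
        }
        return Long.parseLong(value.toString()); // string spelling, e.g. "42"
    }

    public static void main(String[] args) {
        System.out.println(parseLongValue(42L));  // 42
        System.out.println(parseLongValue("42")); // 42
    }
}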
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;

@ -41,12 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

@ -85,8 +81,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
        @Override
        public ShortFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, docValues,
                    ignoreMalformed(context), coerce(context), fieldDataSettings,
            ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, defaultFieldType,
                    ignoreMalformed(context), coerce(context),
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;

@ -180,8 +176,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            short iValue = Short.parseShort(value);
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            short iValue = parseValue(value);
            short iSim = fuzziness.asShort();
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                    iValue - iSim,

@ -199,12 +195,10 @@ public class ShortFieldMapper extends NumberFieldMapper {
        }
    }

    protected ShortFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
    protected ShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                               Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                               @Nullable Settings fieldDataSettings,
                               Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce,
                fieldDataSettings, indexSettings, multiFields, copyTo);
        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
    }

    @Override

@ -212,16 +206,6 @@ public class ShortFieldMapper extends NumberFieldMapper {
        return (ShortFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("short");
    }

    private static short parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).shortValue();
@@ -24,14 +24,13 @@ import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -50,14 +49,11 @@ import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

-/**
- *
- */
-public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMapper.IncludeInAll {
+public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

     public static final String CONTENT_TYPE = "string";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final MappedFieldType FIELD_TYPE = new StringFieldType();

         static {
@@ -70,7 +66,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
         public static final int IGNORE_ABOVE = -1;
     }

-    public static class Builder extends AbstractFieldMapper.Builder<Builder, StringFieldMapper> {
+    public static class Builder extends FieldMapper.Builder<Builder, StringFieldMapper> {

         protected String nullValue = Defaults.NULL_VALUE;

@@ -116,22 +112,20 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
             // if they are set explicitly, we will use those values
             // we also change the values on the default field type so that toXContent emits what
             // differs from the defaults
-            MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone();
             if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) {
                 defaultFieldType.setOmitNorms(true);
                 defaultFieldType.setIndexOptions(IndexOptions.DOCS);
-                if (!omitNormsSet && fieldType.boost() == Defaults.BOOST) {
+                if (!omitNormsSet && fieldType.boost() == 1.0f) {
                     fieldType.setOmitNorms(true);
                 }
                 if (!indexOptionsSet) {
                     fieldType.setIndexOptions(IndexOptions.DOCS);
                 }
             }
-            defaultFieldType.freeze();
             setupFieldType(context);
             StringFieldMapper fieldMapper = new StringFieldMapper(
-                    name, fieldType, defaultFieldType, docValues, positionOffsetGap, ignoreAbove,
-                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+                    name, fieldType, defaultFieldType, positionOffsetGap, ignoreAbove,
+                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
             fieldMapper.includeInAll(includeInAll);
             return fieldMapper;
         }
@@ -163,14 +157,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
                     builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1));
                     // we need to update to actual analyzers if they are not set in this case...
                     // so we can inject the position offset gap...
-                    if (builder.fieldType.indexAnalyzer() == null) {
-                        builder.fieldType.setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
+                    if (builder.fieldType().indexAnalyzer() == null) {
+                        builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
                     }
-                    if (builder.fieldType.searchAnalyzer() == null) {
-                        builder.fieldType.setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
+                    if (builder.fieldType().searchAnalyzer() == null) {
+                        builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
                     }
-                    if (builder.fieldType.searchQuoteAnalyzer() == null) {
-                        builder.fieldType.setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+                    if (builder.fieldType().searchQuoteAnalyzer() == null) {
+                        builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
                     }
                     iterator.remove();
                 } else if (propName.equals("ignore_above")) {
@@ -221,30 +215,18 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
     private Boolean includeInAll;
     private int positionOffsetGap;
     private int ignoreAbove;
-    private final MappedFieldType defaultFieldType;

-    protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Boolean docValues,
-                                int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings,
+    protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
+                                int positionOffsetGap, int ignoreAbove,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
+        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
             throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
         }
-        this.defaultFieldType = defaultFieldType;
         this.positionOffsetGap = positionOffsetGap;
         this.ignoreAbove = ignoreAbove;
     }

-    @Override
-    public MappedFieldType defaultFieldType() {
-        return defaultFieldType;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("string");
-    }
-
     @Override
     public void includeInAll(Boolean includeInAll) {
         if (includeInAll != null) {

@@ -78,8 +78,8 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
         @Override
         public TokenCountFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(name, fieldType, docValues,
-                    ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(),
+            TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(name, fieldType, defaultFieldType,
+                    ignoreMalformed(context), coerce(context), context.indexSettings(),
                     analyzer, multiFieldsBuilder.build(this, context), copyTo);
             fieldMapper.includeInAll(includeInAll);
             return fieldMapper;
@@ -127,10 +127,9 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {

     private NamedAnalyzer analyzer;

-    protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed,
-                                    Explicit<Boolean> coerce, Settings fieldDataSettings, Settings indexSettings,
-                                    NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
+    protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed,
+                                    Explicit<Boolean> coerce, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) {
+        super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
         this.analyzer = analyzer;
     }

@@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType.Loading;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -60,8 +61,8 @@ public class TypeParsers {
         @Override
         public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             ContentPath.Type pathType = null;
-            AbstractFieldMapper.Builder mainFieldBuilder = null;
-            List<AbstractFieldMapper.Builder> fields = null;
+            FieldMapper.Builder mainFieldBuilder = null;
+            List<FieldMapper.Builder> fields = null;
             String firstType = null;

             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
@@ -94,13 +95,13 @@ public class TypeParsers {
                     throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + fieldName + "]");
                 }
                 if (propName.equals(name)) {
-                    mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
+                    mainFieldBuilder = (FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
                     fieldsIterator.remove();
                 } else {
                     if (fields == null) {
                         fields = new ArrayList<>(2);
                     }
-                    fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
+                    fields.add((FieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
                     fieldsIterator.remove();
                 }
             }
@@ -121,8 +122,8 @@ public class TypeParsers {
                 mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
             } else {
                 Mapper.Builder substitute = typeParser.parse(name, Collections.<String, Object>emptyMap(), parserContext);
-                if (substitute instanceof AbstractFieldMapper.Builder) {
-                    mainFieldBuilder = ((AbstractFieldMapper.Builder) substitute).index(false);
+                if (substitute instanceof FieldMapper.Builder) {
+                    mainFieldBuilder = ((FieldMapper.Builder) substitute).index(false);
                 } else {
                     // The first multi isn't a core field type
                     mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
@@ -180,9 +181,9 @@ public class TypeParsers {
         }
     }

-    public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
-        NamedAnalyzer indexAnalyzer = builder.fieldType.indexAnalyzer();
-        NamedAnalyzer searchAnalyzer = builder.fieldType.searchAnalyzer();
+    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+        NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
+        NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
         for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
             final String propName = Strings.toUnderscoreCase(entry.getKey());
@@ -299,7 +300,7 @@ public class TypeParsers {
             builder.searchAnalyzer(searchAnalyzer);
         }

-    public static boolean parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
+    public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
         if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
             builder.multiFieldPathType(parsePathType(name, propNode.toString()));
             return true;
@@ -367,7 +368,7 @@ public class TypeParsers {
         return Joda.forPattern(node.toString());
     }

-    public static void parseTermVector(String fieldName, String termVector, AbstractFieldMapper.Builder builder) throws MapperParsingException {
+    public static void parseTermVector(String fieldName, String termVector, FieldMapper.Builder builder) throws MapperParsingException {
         termVector = Strings.toUnderscoreCase(termVector);
         if ("no".equals(termVector)) {
             builder.storeTermVectors(false);
@@ -392,7 +393,7 @@ public class TypeParsers {
         }
     }

-    public static void parseIndex(String fieldName, String index, AbstractFieldMapper.Builder builder) throws MapperParsingException {
+    public static void parseIndex(String fieldName, String index, FieldMapper.Builder builder) throws MapperParsingException {
         index = Strings.toUnderscoreCase(index);
         if ("no".equals(index)) {
             builder.index(false);
@@ -429,8 +430,8 @@ public class TypeParsers {
     }

     @SuppressWarnings("unchecked")
-    public static void parseCopyFields(Object propNode, AbstractFieldMapper.Builder builder) {
-        AbstractFieldMapper.CopyTo.Builder copyToBuilder = new AbstractFieldMapper.CopyTo.Builder();
+    public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) {
+        FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder();
         if (isArray(propNode)) {
             for(Object node : (List<Object>) propNode) {
                 copyToBuilder.add(nodeStringValue(node, null));

@@ -27,7 +27,6 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
@@ -39,13 +38,12 @@ import org.elasticsearch.common.util.ByteUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
@@ -76,7 +74,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
  * "lon" : 2.1
  * }
  */
-public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayValueMapperParser {
+public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {

     public static final String CONTENT_TYPE = "geo_point";

@@ -110,7 +108,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
         }
     }

-    public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoPointFieldMapper> {
+    public static class Builder extends FieldMapper.Builder<Builder, GeoPointFieldMapper> {

         private ContentPath.Type pathType = Defaults.PATH_TYPE;

@@ -129,7 +127,8 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
             this.builder = this;
         }

-        GeoPointFieldType fieldType() {
+        @Override
+        public GeoPointFieldType fieldType() {
             return (GeoPointFieldType)fieldType;
         }

@@ -204,10 +203,10 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
             // this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to
             // store them as a single token.
             fieldType.setTokenized(false);
-            fieldType.setHasDocValues(false);
             setupFieldType(context);
-
-            return new GeoPointFieldMapper(name, fieldType, docValues, fieldDataSettings, context.indexSettings(), origPathType,
+            fieldType.setHasDocValues(false);
+            defaultFieldType.setHasDocValues(false);
+            return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
                     latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context));
         }
     }
@@ -586,9 +585,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal

     private final StringFieldMapper geohashMapper;

-    public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings,
+    public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
                                ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) {
-        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, null);
+        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
         this.pathType = pathType;
         this.latMapper = latMapper;
         this.lonMapper = lonMapper;
@@ -605,21 +604,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
         return (GeoPointFieldType) super.fieldType();
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("geo_point");
-    }
-
-    @Override
-    protected boolean defaultDocValues() {
-        return false;
-    }

     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
         throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");

@@ -37,12 +37,11 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

 import java.io.IOException;
 import java.util.Iterator;
@@ -69,7 +68,7 @@ import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField;
  * ]
  * }
  */
-public class GeoShapeFieldMapper extends AbstractFieldMapper {
+public class GeoShapeFieldMapper extends FieldMapper {

     public static final String CONTENT_TYPE = "geo_shape";

@@ -107,7 +106,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         }
     }

-    public static class Builder extends AbstractFieldMapper.Builder<Builder, GeoShapeFieldMapper> {
+    public static class Builder extends FieldMapper.Builder<Builder, GeoShapeFieldMapper> {

         public Builder(String name) {
             super(name, Defaults.FIELD_TYPE);
@@ -359,7 +358,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
     }

     public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
-        super(simpleName, fieldType, false, null, indexSettings, multiFields, copyTo);
+        super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
     }

     @Override
@@ -367,16 +366,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
         return (GeoShapeFieldType) super.fieldType();
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return null;
-    }

     @Override
     public Mapper parse(ParseContext context) throws IOException {
         try {

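The GeoPoint and GeoShape hunks above repeat the pattern that runs through this whole commit: instead of every mapper overriding defaultFieldType() and defaultFieldDataType(), each subclass now hands its default field type to the base-class constructor once. A compact sketch of the shape of that refactoring (hypothetical names, not Elasticsearch source):

    // Hypothetical sketch: the base class stores the default handed in by each
    // subclass constructor, replacing the per-subclass defaultFieldType() overrides.
    abstract class SketchFieldMapper {
        protected final Object fieldType;
        protected final Object defaultFieldType;

        SketchFieldMapper(Object fieldType, Object defaultFieldType) {
            this.fieldType = fieldType;
            this.defaultFieldType = defaultFieldType;
        }

        // Formerly abstract and overridden everywhere; now a plain final accessor.
        final Object defaultFieldType() {
            return defaultFieldType;
        }
    }

    class SketchGeoShapeMapper extends SketchFieldMapper {
        static final Object DEFAULTS = new Object();

        SketchGeoShapeMapper(Object fieldType) {
            super(fieldType, DEFAULTS); // the default is supplied exactly once
        }
    }
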
@@ -25,7 +25,6 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
@@ -39,9 +38,8 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.similarity.SimilarityLookupService;

@@ -72,7 +70,7 @@ public class AllFieldMapper extends MetadataFieldMapper {

     public static final String CONTENT_TYPE = "_all";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = AllFieldMapper.NAME;
         public static final String INDEX_NAME = AllFieldMapper.NAME;
         public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED;
@@ -111,7 +109,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
             }
             fieldType.setTokenized(true);

-            return new AllFieldMapper(fieldType, enabled, fieldDataSettings, context.indexSettings());
+            return new AllFieldMapper(fieldType, enabled, context.indexSettings());
         }
     }

@@ -156,7 +154,9 @@ public class AllFieldMapper extends MetadataFieldMapper {

     static final class AllFieldType extends MappedFieldType {

-        public AllFieldType() {}
+        public AllFieldType() {
+            setFieldDataType(new FieldDataType("string"));
+        }

         protected AllFieldType(AllFieldType ref) {
             super(ref);
@@ -194,15 +194,11 @@ public class AllFieldMapper extends MetadataFieldMapper {
     private EnabledAttributeMapper enabledState;

     public AllFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
-                Defaults.ENABLED,
-                existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
-                indexSettings);
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.ENABLED, indexSettings);
     }

-    protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled,
-                             @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, false, fieldDataSettings, indexSettings);
+    protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, Settings indexSettings) {
+        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
         this.enabledState = enabled;

     }
@@ -211,16 +207,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
         return this.enabledState.enabled;
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("string");
-    }

     @Override
     public void preParse(ParseContext context) throws IOException {
     }
@@ -316,12 +302,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
         } else if (includeDefaults) {
             builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
         }
-
-        if (hasCustomFieldDataSettings()) {
-            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
-        } else if (includeDefaults) {
-            builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
-        }
     }

     @Override

@@ -24,7 +24,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -33,9 +32,8 @@ import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -59,7 +57,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {

     public static final String CONTENT_TYPE = "_field_names";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = FieldNamesFieldMapper.NAME;

         public static final boolean ENABLED = true;
@@ -100,9 +98,10 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         @Override
         public FieldNamesFieldMapper build(BuilderContext context) {
             setupFieldType(context);
+            fieldType.setHasDocValues(false);
             FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldType)fieldType;
             fieldNamesFieldType.setEnabled(enabled);
-            return new FieldNamesFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
+            return new FieldNamesFieldMapper(fieldType, context.indexSettings());
         }
     }

@@ -135,7 +134,9 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {

         private boolean enabled = Defaults.ENABLED;

-        public FieldNamesFieldType() {}
+        public FieldNamesFieldType() {
+            setFieldDataType(new FieldDataType("string"));
+        }

         protected FieldNamesFieldType(FieldNamesFieldType ref) {
             super(ref);
@@ -197,18 +198,14 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         }
     }

-    private final MappedFieldType defaultFieldType;
     private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled

     public FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
-                existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
-                indexSettings);
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), indexSettings);
     }

-    public FieldNamesFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, false, fieldDataSettings, indexSettings);
-        this.defaultFieldType = Defaults.FIELD_TYPE;
+    public FieldNamesFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
+        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
         this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
         if (this.pre13Index) {
             FieldNamesFieldType newFieldType = fieldType().clone();
@@ -223,16 +220,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         return (FieldNamesFieldType) super.fieldType();
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return defaultFieldType;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("string");
-    }

     @Override
     public void preParse(ParseContext context) throws IOException {
     }

@@ -46,10 +46,9 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;

 import java.io.IOException;
@@ -69,7 +68,7 @@ public class IdFieldMapper extends MetadataFieldMapper {

     public static final String CONTENT_TYPE = "_id";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = IdFieldMapper.NAME;

         public static final MappedFieldType FIELD_TYPE = new IdFieldType();
@@ -108,8 +107,8 @@ public class IdFieldMapper extends MetadataFieldMapper {

         @Override
         public IdFieldMapper build(BuilderContext context) {
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, name));
-            return new IdFieldMapper(fieldType, docValues, path, fieldDataSettings, context.indexSettings());
+            setupFieldType(context);
+            return new IdFieldMapper(fieldType, path, context.indexSettings());
         }
     }

@@ -136,7 +135,9 @@ public class IdFieldMapper extends MetadataFieldMapper {

     static final class IdFieldType extends MappedFieldType {

-        public IdFieldType() {}
+        public IdFieldType() {
+            setFieldDataType(new FieldDataType("string"));
+        }

         protected IdFieldType(IdFieldType ref) {
             super(ref);
@@ -228,14 +229,11 @@ public class IdFieldMapper extends MetadataFieldMapper {
     private final String path;

     public IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(idFieldType(indexSettings, existing), null, Defaults.PATH,
-                existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
-                indexSettings);
+        this(idFieldType(indexSettings, existing), Defaults.PATH, indexSettings);
     }

-    protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path,
-                            @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, docValues, fieldDataSettings, indexSettings);
+    protected IdFieldMapper(MappedFieldType fieldType, String path, Settings indexSettings) {
+        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
         this.path = path;
     }

@@ -255,16 +253,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
         return this.path;
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("string");
-    }

     @Override
     public void preParse(ParseContext context) throws IOException {
         if (context.sourceToParse().id() != null) {
@@ -331,9 +319,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             builder.field("path", path);
         }

-        if (hasCustomFieldDataSettings()) {
-            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
-        } else if (includeDefaults) {
+        if (includeDefaults || hasCustomFieldDataSettings()) {
             builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
         }
         builder.endObject();

@@ -31,7 +31,6 @@ import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -39,7 +38,6 @@ import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;

 import java.io.IOException;
@@ -59,7 +57,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {

     public static final String CONTENT_TYPE = "_index";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = IndexFieldMapper.NAME;

         public static final MappedFieldType FIELD_TYPE = new IndexFieldType();
@@ -94,8 +92,9 @@ public class IndexFieldMapper extends MetadataFieldMapper {

         @Override
         public IndexFieldMapper build(BuilderContext context) {
-            fieldType.setNames(new MappedFieldType.Names(indexName, indexName, name));
-            return new IndexFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings());
+            setupFieldType(context);
+            fieldType.setHasDocValues(false);
+            return new IndexFieldMapper(fieldType, enabledState, context.indexSettings());
         }
     }

@@ -207,14 +206,11 @@ public class IndexFieldMapper extends MetadataFieldMapper {
     private EnabledAttributeMapper enabledState;

     public IndexFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing,
-                Defaults.ENABLED_STATE,
-                existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()), indexSettings);
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, Defaults.ENABLED_STATE, indexSettings);
     }

-    public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState,
-                            @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, false, fieldDataSettings, indexSettings);
+    public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, Settings indexSettings) {
+        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
         this.enabledState = enabledState;
     }

@@ -222,16 +218,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         return this.enabledState.enabled;
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType(IndexFieldMapper.NAME);
-    }

     public String value(Document document) {
         Field field = (Field) document.getField(fieldType().names().indexName());
         return field == null ? null : (String)fieldType().value(field);
@@ -280,13 +266,8 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
             builder.field("enabled", enabledState.enabled);
         }
-
-        if (indexCreatedBefore2x) {
-            if (hasCustomFieldDataSettings()) {
-                builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
-            } else if (includeDefaults) {
-                builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
-            }
+        if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) {
+            builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
         }
         builder.endObject();
         return builder;

@@ -40,10 +40,9 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;

 import java.io.IOException;
@@ -64,7 +63,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
     public static final String NAME = "_parent";
     public static final String CONTENT_TYPE = "_parent";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = ParentFieldMapper.NAME;

         public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
@@ -77,7 +76,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
             FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
             FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
             FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
-            FIELD_TYPE.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
             FIELD_TYPE.freeze();
         }
     }
@@ -87,7 +85,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
         protected String indexName;

         private String type;
-        protected Settings fieldDataSettings;

         public Builder() {
             super(Defaults.NAME, Defaults.FIELD_TYPE);
@@ -100,18 +97,14 @@ public class ParentFieldMapper extends MetadataFieldMapper {
             return builder;
         }

-        public Builder fieldDataSettings(Settings settings) {
-            this.fieldDataSettings = settings;
-            return builder;
-        }
-
         @Override
         public ParentFieldMapper build(BuilderContext context) {
             if (type == null) {
                 throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
             }
             setupFieldType(context);
-            return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings());
+            fieldType.setHasDocValues(context.indexCreatedVersion().onOrAfter(Version.V_2_0_0));
+            return new ParentFieldMapper(fieldType, type, context.indexSettings());
         }
     }

@@ -145,7 +138,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {

     static final class ParentFieldType extends MappedFieldType {

-        public ParentFieldType() {}
+        public ParentFieldType() {
+            setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
+        }

         protected ParentFieldType(ParentFieldType ref) {
             super(ref);
@@ -229,32 +224,25 @@ public class ParentFieldMapper extends MetadataFieldMapper {

     private final String type;

-    protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0), fieldDataSettings, indexSettings);
+    protected ParentFieldMapper(MappedFieldType fieldType, String type, Settings indexSettings) {
+        super(NAME, setupDocValues(indexSettings, fieldType), setupDocValues(indexSettings, Defaults.FIELD_TYPE), indexSettings);
         this.type = type;
     }

     public ParentFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
-                null,
-                existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
-                indexSettings);
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), null, indexSettings);
     }

+    static MappedFieldType setupDocValues(Settings indexSettings, MappedFieldType fieldType) {
+        fieldType = fieldType.clone();
+        fieldType.setHasDocValues(Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0));
+        return fieldType;
+    }
+
     public String type() {
         return type;
     }

-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.EAGER_VALUE));
-    }
-
     @Override
     public void preParse(ParseContext context) throws IOException {
     }
@@ -328,9 +316,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {

         builder.startObject(CONTENT_TYPE);
         builder.field("type", type);
-        if (hasCustomFieldDataSettings()) {
-            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
-        } else if (includeDefaults) {
+        if (includeDefaults || hasCustomFieldDataSettings()) {
             builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
         }
         builder.endObject();

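ParentFieldMapper's new setupDocValues helper shows why the doc-values flag is applied to a clone rather than to the incoming instance: Defaults.FIELD_TYPE is shared (and frozen after the static initializer), so it must never be mutated in place. A generic sketch of that clone-then-mutate idiom (hypothetical types, not Elasticsearch source):

    // Hypothetical illustration of the defensive-copy idiom behind setupDocValues:
    // copy the shared default, mutate the copy, and leave the original untouched.
    class SketchFieldType implements Cloneable {
        boolean hasDocValues;

        @Override
        public SketchFieldType clone() {
            try {
                return (SketchFieldType) super.clone();
            } catch (CloneNotSupportedException e) {
                throw new AssertionError(e); // cannot happen: we implement Cloneable
            }
        }

        static SketchFieldType withDocValues(SketchFieldType shared, boolean enable) {
            SketchFieldType copy = shared.clone();
            copy.hasDocValues = enable;
            return copy;
        }
    }
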
@@ -33,9 +33,8 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;

 import java.io.IOException;
 import java.util.Iterator;
@@ -53,7 +52,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
     public static final String NAME = "_routing";
     public static final String CONTENT_TYPE = "_routing";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = "_routing";

         public static final MappedFieldType FIELD_TYPE = new RoutingFieldType();
@@ -124,7 +123,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper {

     static final class RoutingFieldType extends MappedFieldType {

-        public RoutingFieldType() {}
+        public RoutingFieldType() {
+            setFieldDataType(new FieldDataType("string"));
+        }

         protected RoutingFieldType(RoutingFieldType ref) {
             super(ref);
@@ -157,21 +158,11 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
     }

     protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) {
-        super(NAME, fieldType, false, null, indexSettings);
+        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
         this.required = required;
         this.path = path;
     }

-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("string");
-    }
-
     public void markAsRequired() {
         this.required = true;
     }

@@ -24,19 +24,15 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;

 import java.io.IOException;
 import java.util.Iterator;
@@ -83,6 +79,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         @Override
         public SizeFieldMapper build(BuilderContext context) {
             setupFieldType(context);
+            fieldType.setHasDocValues(false);
             return new SizeFieldMapper(enabledState, fieldType, context.indexSettings());
         }
     }
@@ -114,7 +111,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
     }

     public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) {
-        super(NAME, fieldType, false, null, indexSettings);
+        super(NAME, fieldType, Defaults.SIZE_FIELD_TYPE, indexSettings);
         this.enabledState = enabled;

     }
@@ -138,16 +135,6 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         super.parse(context);
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.SIZE_FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("int");
-    }

     @Override
     public Mapper parse(ParseContext context) throws IOException {
         // nothing to do here, we call the parent in postParse

@@ -42,7 +42,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -50,7 +49,6 @@ import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

 import java.io.BufferedInputStream;
 import java.io.IOException;
@@ -72,7 +70,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {

     public static final String CONTENT_TYPE = "_source";

-    public static class Defaults extends AbstractFieldMapper.Defaults {
+    public static class Defaults {
         public static final String NAME = SourceFieldMapper.NAME;
         public static final boolean ENABLED = true;
         public static final long COMPRESS_THRESHOLD = -1;
@@ -256,7 +254,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {

     protected SourceFieldMapper(boolean enabled, String format, Boolean compress, long compressThreshold,
                                 String[] includes, String[] excludes, Settings indexSettings) {
-        super(NAME, Defaults.FIELD_TYPE.clone(), false, null, indexSettings); // Only stored.
+        super(NAME, Defaults.FIELD_TYPE.clone(), Defaults.FIELD_TYPE, indexSettings); // Only stored.
         this.enabled = enabled;
         this.compress = compress;
         this.compressThreshold = compressThreshold;
@@ -284,16 +282,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
         return complete;
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return null;
-    }

     @Override
     public void preParse(ParseContext context) throws IOException {
         super.parse(context);

@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;

 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
-import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
@@ -29,20 +28,16 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.AlreadyExpiredException;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.analysis.NumericLongAnalyzer;
-import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.mapper.core.LongFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -101,6 +96,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
         @Override
         public TTLFieldMapper build(BuilderContext context) {
             setupFieldType(context);
+            fieldType.setHasDocValues(false);
             return new TTLFieldMapper(fieldType, enabledState, defaultTTL, fieldDataSettings, context.indexSettings());
         }
     }
@@ -167,7 +163,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {

     protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL,
                              @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(NAME, fieldType, false, fieldDataSettings, indexSettings);
+        super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
         this.enabledState = enabled;
         this.defaultTTL = defaultTTL;
     }
@@ -194,16 +190,6 @@ public class TTLFieldMapper extends MetadataFieldMapper {
         super.parse(context);
     }
-
-    @Override
-    public MappedFieldType defaultFieldType() {
-        return Defaults.TTL_FIELD_TYPE;
-    }
-
-    @Override
-    public FieldDataType defaultFieldDataType() {
-        return new FieldDataType("long");
-    }

     @Override
     public Mapper parse(ParseContext context) throws IOException, MapperParsingException {
         if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally

@@ -24,16 +24,12 @@ import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;

@@ -41,10 +37,8 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;

import java.io.IOException;
import java.util.Iterator;

@@ -59,15 +53,16 @@ public class TimestampFieldMapper extends MetadataFieldMapper {

public static final String NAME = "_timestamp";
public static final String CONTENT_TYPE = "_timestamp";
public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||dateOptionalTime";
public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||strictDateOptionalTime";

public static class Defaults extends DateFieldMapper.Defaults {
public static final String NAME = "_timestamp";

// TODO: this should be removed
public static final MappedFieldType PRE_20_FIELD_TYPE;
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT);
public static final TimestampFieldType PRE_20_FIELD_TYPE;
public static final TimestampFieldType FIELD_TYPE = new TimestampFieldType();
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT);
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("epoch_millis||dateOptionalTime");

static {
FIELD_TYPE.setStored(true);

@@ -77,9 +72,14 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER);
FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT));
FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE));
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.freeze();
PRE_20_FIELD_TYPE = FIELD_TYPE.clone();
PRE_20_FIELD_TYPE.setStored(false);
PRE_20_FIELD_TYPE.setHasDocValues(false);
PRE_20_FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER_BEFORE_2_0);
PRE_20_FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Defaults.PRECISION_STEP_64_BIT));
PRE_20_FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Integer.MAX_VALUE));
PRE_20_FIELD_TYPE.freeze();
}

@@ -104,7 +104,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
}
}

DateFieldMapper.DateFieldType fieldType() {
@Override
public DateFieldMapper.DateFieldType fieldType() {
return (DateFieldMapper.DateFieldType)fieldType;
}

@@ -144,9 +145,23 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0)) {
fieldType.setStored(false);
}

if (fieldType().dateTimeFormatter().equals(Defaults.DATE_TIME_FORMATTER)) {
fieldType().setDateTimeFormatter(getDateTimeFormatter(context.indexSettings()));
}

setupFieldType(context);
return new TimestampFieldMapper(fieldType, docValues, enabledState, path, defaultTimestamp,
ignoreMissing, fieldDataSettings, context.indexSettings());
return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, path, defaultTimestamp,
ignoreMissing, context.indexSettings());
}
}

private static FormatDateTimeFormatter getDateTimeFormatter(Settings indexSettings) {
Version indexCreated = Version.indexCreated(indexSettings);
if (indexCreated.onOrAfter(Version.V_2_0_0)) {
return Defaults.DATE_TIME_FORMATTER;
} else {
return Defaults.DATE_TIME_FORMATTER_BEFORE_2_0;
}
}

@@ -227,7 +242,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
}
}

private static MappedFieldType defaultFieldType(Settings settings, MappedFieldType existing) {
private static MappedFieldType chooseFieldType(Settings settings, MappedFieldType existing) {
if (existing != null) {
return existing;
}

@@ -238,22 +253,18 @@ public class TimestampFieldMapper extends MetadataFieldMapper {

private final String path;
private final String defaultTimestamp;
private final MappedFieldType defaultFieldType;
private final Boolean ignoreMissing;

public TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(defaultFieldType(indexSettings, existing).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
}

protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,
String defaultTimestamp, Boolean ignoreMissing, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(NAME, fieldType, docValues, fieldDataSettings, indexSettings);
protected TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path,
String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) {
super(NAME, fieldType, defaultFieldType, indexSettings);
this.enabledState = enabledState;
this.path = path;
this.defaultTimestamp = defaultTimestamp;
this.defaultFieldType = defaultFieldType(indexSettings, null);
this.ignoreMissing = ignoreMissing;
}

@@ -262,16 +273,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
return (TimestampFieldType)super.fieldType();
}

@Override
public MappedFieldType defaultFieldType() {
return defaultFieldType;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("long");
}

public boolean enabled() {
return this.enabledState.enabled;
}

@@ -335,7 +336,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
&& fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())
&& Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)
&& defaultDocValues() == fieldType().hasDocValues()) {
&& defaultFieldType.hasDocValues() == fieldType().hasDocValues()) {
return builder;
}
builder.startObject(CONTENT_TYPE);

@@ -354,7 +355,9 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) {
builder.field("path", path);
}
if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())) {
// different format handling depending on index version
String defaultDateFormat = indexCreatedBefore2x ? Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format() : Defaults.DATE_TIME_FORMATTER.format();
if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) {
builder.field("format", fieldType().dateTimeFormatter().format());
}
if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) {

@@ -363,12 +366,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (includeDefaults || ignoreMissing != null) {
builder.field("ignore_missing", ignoreMissing);
}
if (indexCreatedBefore2x) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) {
builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap());
}

builder.endObject();
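The net effect of the TimestampFieldMapper hunks above is a version-dependent default for the `_timestamp` format: indices created on 2.0+ get the strict pattern, while older indices keep the lenient one so existing documents still parse. A standalone sketch of that dispatch, using only the two patterns named in the diff (the helper and the version check below are illustrative, not the Elasticsearch API):

// Illustrative only: mirrors getDateTimeFormatter(...) above without
// depending on org.elasticsearch.Version or the Joda wrappers.
public class TimestampFormatDefaults {
    static final String FORMAT_2_0 = "epoch_millis||strictDateOptionalTime";
    static final String FORMAT_PRE_2_0 = "epoch_millis||dateOptionalTime";

    static String defaultFormat(int indexMajorVersion) {
        // 2.0+ indices parse timestamps strictly; older indices stay lenient
        // so documents indexed under the old rules keep round-tripping.
        return indexMajorVersion >= 2 ? FORMAT_2_0 : FORMAT_PRE_2_0;
    }

    public static void main(String[] args) {
        System.out.println(defaultFormat(2)); // epoch_millis||strictDateOptionalTime
        System.out.println(defaultFormat(1)); // epoch_millis||dateOptionalTime
    }
}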
@@ -40,10 +40,9 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;

@@ -61,7 +60,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {

public static final String CONTENT_TYPE = "_type";

public static class Defaults extends AbstractFieldMapper.Defaults {
public static class Defaults {
public static final String NAME = TypeFieldMapper.NAME;

public static final MappedFieldType FIELD_TYPE = new TypeFieldType();

@@ -106,7 +105,9 @@ public class TypeFieldMapper extends MetadataFieldMapper {

static final class TypeFieldType extends MappedFieldType {

public TypeFieldType() {}
public TypeFieldType() {
setFieldDataType(new FieldDataType("string"));
}

protected TypeFieldType(TypeFieldType ref) {
super(ref);

@@ -150,20 +151,9 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}

public TypeFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
super(NAME, fieldType, false, null, indexSettings);
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
}

@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("string");
}

@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -36,11 +35,10 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

import java.io.IOException;
import java.util.List;

@@ -57,7 +55,7 @@ public class UidFieldMapper extends MetadataFieldMapper {

public static final String CONTENT_TYPE = "_uid";

public static class Defaults extends AbstractFieldMapper.Defaults {
public static class Defaults {
public static final String NAME = UidFieldMapper.NAME;

public static final MappedFieldType FIELD_TYPE = new UidFieldType();

@@ -88,8 +86,9 @@ public class UidFieldMapper extends MetadataFieldMapper {

@Override
public UidFieldMapper build(BuilderContext context) {
fieldType.setNames(new MappedFieldType.Names(indexName, indexName, name));
return new UidFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings());
setupFieldType(context);
fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0));
return new UidFieldMapper(fieldType, defaultFieldType, context.indexSettings());
}
}

@@ -107,7 +106,9 @@ public class UidFieldMapper extends MetadataFieldMapper {

static final class UidFieldType extends MappedFieldType {

public UidFieldType() {}
public UidFieldType() {
setFieldDataType(new FieldDataType("string"));
}

protected UidFieldType(UidFieldType ref) {
super(ref);

@@ -133,30 +134,11 @@ public class UidFieldMapper extends MetadataFieldMapper {
}

public UidFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, null,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, Defaults.FIELD_TYPE, indexSettings);
}

protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(NAME, fieldType, docValuesEnabled(docValues, indexSettings), fieldDataSettings, indexSettings);
}

static Boolean docValuesEnabled(Boolean docValues, Settings indexSettings) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0)) {
return false; // explicitly disable doc values for 2.0+, for now
}
return docValues;
}

@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("string");
protected UidFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings) {
super(NAME, fieldType, defaultFieldType, indexSettings);
}

@Override

@@ -230,9 +212,7 @@ public class UidFieldMapper extends MetadataFieldMapper {

builder.startObject(CONTENT_TYPE);

if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
if (includeDefaults || hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
@@ -55,6 +55,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
static {
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.freeze();
}
}

@@ -89,7 +90,9 @@ public class VersionFieldMapper extends MetadataFieldMapper {

static final class VersionFieldType extends MappedFieldType {

public VersionFieldType() {}
public VersionFieldType() {
setFieldDataType(new FieldDataType("long"));
}

protected VersionFieldType(VersionFieldType ref) {
super(ref);

@@ -116,7 +119,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
}

public VersionFieldMapper(Settings indexSettings) {
super(NAME, Defaults.FIELD_TYPE, true, null, indexSettings);
super(NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, indexSettings);
}

@Override

@@ -148,16 +151,6 @@ public class VersionFieldMapper extends MetadataFieldMapper {
}
}

@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("long");
}

@Override
protected String contentType() {
return CONTENT_TYPE;
@@ -20,10 +20,8 @@
package org.elasticsearch.index.mapper.ip;

import com.google.common.net.InetAddresses;

import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;

@@ -45,9 +43,8 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;

@@ -119,8 +116,8 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public IpFieldMapper build(BuilderContext context) {
setupFieldType(context);
IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, docValues, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}

@@ -158,10 +155,10 @@ public class IpFieldMapper extends NumberFieldMapper {
}
}

public static final class IpFieldType extends NumberFieldType {
public static final class IpFieldType extends LongFieldMapper.LongFieldType {

public IpFieldType() {
super(NumericType.LONG);
setFieldDataType(new FieldDataType("long"));
}

protected IpFieldType(IpFieldType ref) {

@@ -220,8 +217,8 @@ public class IpFieldMapper extends NumberFieldMapper {
}

@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = ipToLong(value);
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = parseValue(value);
long iSim;
try {
iSim = ipToLong(fuzziness.asString());

@@ -235,22 +232,10 @@ public class IpFieldMapper extends NumberFieldMapper {
}
}

protected IpFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
protected IpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, docValues, ignoreMalformed, coerce,
fieldDataSettings, indexSettings, multiFields, copyTo);
}

@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}

@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("long");
super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo);
}

private static long parseValue(Object value) {
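For a numeric field like `_ip`, "fuzziness" is not an edit distance but a numeric delta, which is why `fuzzyQuery` above parses both the value and the fuzziness into longs. A standalone sketch of the idea (the encoding helper is illustrative; the real mapper builds a NumericRangeQuery from these bounds):

// Illustrative only: an IPv4 address encodes to a long, and a fuzzy
// match on it is effectively the range [value - delta, value + delta].
public class IpFuzzySketch {
    static long ipToLong(String ip) {
        long value = 0;
        for (String octet : ip.split("\\.")) {
            value = (value << 8) | Integer.parseInt(octet);
        }
        return value;
    }

    public static void main(String[] args) {
        long value = ipToLong("192.168.0.10");
        long delta = 5; // the parsed "fuzziness"
        System.out.println("range: [" + (value - delta) + ", " + (value + delta) + "]");
    }
}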
@@ -278,7 +278,10 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
Iterator<Map.Entry<String, Object>> iterator = propsNode.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
String fieldName = entry.getKey();
if (fieldName.contains(".")) {
throw new MapperParsingException("Field name [" + fieldName + "] cannot contain '.'");
}
// Should accept empty arrays, as a work around for when the
// user can't provide an empty Map. (PHP for example)
boolean isEmptyList = entry.getValue() instanceof List && ((List<?>) entry.getValue()).isEmpty();

@@ -301,23 +304,23 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
// any type, including core values, which
type = ObjectMapper.CONTENT_TYPE;
} else {
throw new MapperParsingException("No type specified for property [" + propName + "]");
throw new MapperParsingException("No type specified for field [" + fieldName + "]");
}
}

Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]");
}
objBuilder.add(typeParser.parse(propName, propNode, parserContext));
objBuilder.add(typeParser.parse(fieldName, propNode, parserContext));
propNode.remove("type");
DocumentMapperParser.checkNoRemainingFields(propName, propNode, parserContext.indexVersionCreated());
DocumentMapperParser.checkNoRemainingFields(fieldName, propNode, parserContext.indexVersionCreated());
iterator.remove();
} else if (isEmptyList) {
iterator.remove();
} else {
throw new MapperParsingException("Expected map for property [fields] on field [" + propName + "] but got a "
+ propName.getClass());
throw new MapperParsingException("Expected map for property [fields] on field [" + fieldName + "] but got a "
+ fieldName.getClass());
}
}
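Beyond renaming `propName` to `fieldName` in the error messages, the ObjectMapper hunk above rejects field names containing a dot, since `.` is the path separator in mappings ("user.name" must always mean a field inside an object, never a literal name). A minimal standalone sketch of the guard, with IllegalArgumentException substituted for the internal MapperParsingException:

// Illustrative only: the same check as the hunk above, outside the
// mapping-parser loop it normally lives in.
static void checkFieldName(String fieldName) {
    if (fieldName.contains(".")) {
        throw new IllegalArgumentException("Field name [" + fieldName + "] cannot contain '.'");
    }
}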
@@ -49,7 +49,7 @@ public class RootObjectMapper extends ObjectMapper {
public static final FormatDateTimeFormatter[] DYNAMIC_DATE_TIME_FORMATTERS =
new FormatDateTimeFormatter[]{
DateFieldMapper.Defaults.DATE_TIME_FORMATTER,
Joda.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd")
Joda.getStrictStandardDateFormatter()
};
public static final boolean DATE_DETECTION = true;
public static final boolean NUMERIC_DETECTION = false;
@@ -49,16 +49,87 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> i
static final FuzzyQueryBuilder PROTOTYPE = new FuzzyQueryBuilder(null, null);

/**
* Constructs a new term query.
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the term
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, Object value) {
this.name = name;
this.value = value;
}

/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, String value) {
this(name, (Object) value);
}

/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, int value) {
this(name, (Object) value);
}

/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, long value) {
this(name, (Object) value);
}

/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, float value) {
this(name, (Object) value);
}

/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, double value) {
this(name, (Object) value);
}

// NO COMMIT: not sure we should also allow boolean?
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, boolean value) {
this(name, (Object) value);
}

/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
*/
@Override
public FuzzyQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}

public FuzzyQueryBuilder fuzziness(Fuzziness fuzziness) {
this.fuzziness = fuzziness;
return this;
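All of the typed constructors above funnel into the `(String, Object)` variant, so callers can hand the builder raw numerics as well as strings. A hedged usage sketch, chaining only the methods visible in this diff (the field names and values are illustrative):

import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.query.FuzzyQueryBuilder;

public class FuzzyQueryUsage {
    public static void main(String[] args) {
        // string value, chained with the fuzziness(...) and boost(...) setters shown above
        FuzzyQueryBuilder byName = new FuzzyQueryBuilder("user", "kimchy")
                .fuzziness(Fuzziness.AUTO) // allowed edits scaled by term length
                .boost(2.0f);              // score multiplier, as documented above
        // numeric value via the new int overload
        FuzzyQueryBuilder byAge = new FuzzyQueryBuilder("age", 35);
    }
}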
@@ -25,6 +25,7 @@ import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;

@@ -60,12 +61,12 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
}
String fieldName = parser.currentName();

String value = null;
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
Fuzziness fuzziness = DEFAULT_FUZZINESS;
int prefixLength = FuzzyQuery.defaultPrefixLength;
int maxExpansions = FuzzyQuery.defaultMaxExpansions;
boolean transpositions = false;
boolean transpositions = FuzzyQuery.defaultTranspositions;
String queryName = null;
MultiTermQuery.RewriteMethod rewriteMethod = null;
if (parseContext.isFilter()) {

@@ -79,9 +80,9 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
currentFieldName = parser.currentName();
} else {
if ("term".equals(currentFieldName)) {
value = parser.text();
value = parser.objectBytes();
} else if ("value".equals(currentFieldName)) {
value = parser.text();
value = parser.objectBytes();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {

@@ -103,7 +104,7 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
}
parser.nextToken();
} else {
value = parser.text();
value = parser.objectBytes();
// move to the next token
parser.nextToken();
}

@@ -111,14 +112,15 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
if (value == null) {
throw new QueryParsingException(parseContext, "No value specified for fuzzy query");
}

Query query = null;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
if (query == null) {
query = new FuzzyQuery(new Term(fieldName, value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
int maxEdits = fuzziness.asDistance(BytesRefs.toString(value));
query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions);
}
if (query instanceof MultiTermQuery) {
QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod);
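When the field is unmapped, the parser above falls back to a raw Lucene FuzzyQuery, now translating the requested fuzziness into an explicit edit distance first. The same construction in isolation, against the plain Lucene API (the field, term, and edit count are illustrative):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;

public class FuzzyFallback {
    public static void main(String[] args) {
        int maxEdits = 2; // what Fuzziness.AUTO resolves to for a six-character term like "kimchy"
        FuzzyQuery query = new FuzzyQuery(new Term("user", "kimchy"), maxEdits,
                FuzzyQuery.defaultPrefixLength,
                FuzzyQuery.defaultMaxExpansions,
                FuzzyQuery.defaultTranspositions);
        System.out.println(query);
    }
}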
@@ -33,7 +33,8 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
private final String name;
private final String regexp;

private int flags = -1;
private int flags = RegexpQueryParser.DEFAULT_FLAGS_VALUE;

private String rewrite;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
private boolean maxDetermizedStatesSet;
@@ -37,6 +37,8 @@ import java.io.IOException;
*/
public class RegexpQueryParser extends BaseQueryParserTemp {

public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value();

@Inject
public RegexpQueryParser() {
}

@@ -55,7 +57,7 @@ public class RegexpQueryParser extends BaseQueryParserTemp {

Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
int flagsValue = -1;
int flagsValue = DEFAULT_FLAGS_VALUE;
int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
String queryName = null;
String currentFieldName = null;
@@ -65,9 +65,8 @@ public class MatchQuery {
protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;

protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;

//LUCENE 4 UPGRADE we need a default value for this!
protected boolean transpositions = false;

protected boolean transpositions = FuzzyQuery.defaultTranspositions;

protected MultiTermQuery.RewriteMethod rewriteMethod;
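`FuzzyQuery.defaultTranspositions` is `true` in Lucene, so replacing the hard-coded `false` here (and in the parsers above) switches fuzzy matching from classic Levenshtein to Damerau-Levenshtein: swapping two adjacent characters now costs one edit instead of two. A small demonstration of the difference (field and terms illustrative):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;

public class TranspositionsDemo {
    public static void main(String[] args) {
        Term term = new Term("field", "abcd");
        // with transpositions, "abdc" is one edit away and matches at maxEdits=1
        FuzzyQuery damerau = new FuzzyQuery(term, 1, 0, 50, true);
        // without, the same swap costs two edits and falls outside maxEdits=1
        FuzzyQuery classic = new FuzzyQuery(term, 1, 0, 50, false);
        System.out.println(damerau + " / " + classic);
    }
}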
@@ -47,21 +47,38 @@ public class SearchStats implements Streamable, ToXContent {
private long fetchTimeInMillis;
private long fetchCurrent;

private long scrollCount;
private long scrollTimeInMillis;
private long scrollCurrent;

Stats() {

}

public Stats(long queryCount, long queryTimeInMillis, long queryCurrent, long fetchCount, long fetchTimeInMillis, long fetchCurrent) {
public Stats(
long queryCount, long queryTimeInMillis, long queryCurrent,
long fetchCount, long fetchTimeInMillis, long fetchCurrent,
long scrollCount, long scrollTimeInMillis, long scrollCurrent
) {
this.queryCount = queryCount;
this.queryTimeInMillis = queryTimeInMillis;
this.queryCurrent = queryCurrent;

this.fetchCount = fetchCount;
this.fetchTimeInMillis = fetchTimeInMillis;
this.fetchCurrent = fetchCurrent;

this.scrollCount = scrollCount;
this.scrollTimeInMillis = scrollTimeInMillis;
this.scrollCurrent = scrollCurrent;
}

public Stats(Stats stats) {
this(stats.queryCount, stats.queryTimeInMillis, stats.queryCurrent, stats.fetchCount, stats.fetchTimeInMillis, stats.fetchCurrent);
this(
stats.queryCount, stats.queryTimeInMillis, stats.queryCurrent,
stats.fetchCount, stats.fetchTimeInMillis, stats.fetchCurrent,
stats.scrollCount, stats.scrollTimeInMillis, stats.scrollCurrent
);
}

public void add(Stats stats) {

@@ -72,6 +89,10 @@ public class SearchStats implements Streamable, ToXContent {
fetchCount += stats.fetchCount;
fetchTimeInMillis += stats.fetchTimeInMillis;
fetchCurrent += stats.fetchCurrent;

scrollCount += stats.scrollCount;
scrollTimeInMillis += stats.scrollTimeInMillis;
scrollCurrent += stats.scrollCurrent;
}

public long getQueryCount() {

@@ -106,6 +127,21 @@ public class SearchStats implements Streamable, ToXContent {
return fetchCurrent;
}

public long getScrollCount() {
return scrollCount;
}

public TimeValue getScrollTime() {
return new TimeValue(scrollTimeInMillis);
}

public long getScrollTimeInMillis() {
return scrollTimeInMillis;
}

public long getScrollCurrent() {
return scrollCurrent;
}

public static Stats readStats(StreamInput in) throws IOException {
Stats stats = new Stats();

@@ -122,6 +158,10 @@ public class SearchStats implements Streamable, ToXContent {
fetchCount = in.readVLong();
fetchTimeInMillis = in.readVLong();
fetchCurrent = in.readVLong();

scrollCount = in.readVLong();
scrollTimeInMillis = in.readVLong();
scrollCurrent = in.readVLong();
}

@Override

@@ -133,6 +173,10 @@ public class SearchStats implements Streamable, ToXContent {
out.writeVLong(fetchCount);
out.writeVLong(fetchTimeInMillis);
out.writeVLong(fetchCurrent);

out.writeVLong(scrollCount);
out.writeVLong(scrollTimeInMillis);
out.writeVLong(scrollCurrent);
}

@Override

@@ -145,6 +189,10 @@ public class SearchStats implements Streamable, ToXContent {
builder.timeValueField(Fields.FETCH_TIME_IN_MILLIS, Fields.FETCH_TIME, fetchTimeInMillis);
builder.field(Fields.FETCH_CURRENT, fetchCurrent);

builder.field(Fields.SCROLL_TOTAL, scrollCount);
builder.timeValueField(Fields.SCROLL_TIME_IN_MILLIS, Fields.SCROLL_TIME, scrollTimeInMillis);
builder.field(Fields.SCROLL_CURRENT, scrollCurrent);

return builder;
}
}

@@ -233,6 +281,10 @@ public class SearchStats implements Streamable, ToXContent {
static final XContentBuilderString FETCH_TIME = new XContentBuilderString("fetch_time");
static final XContentBuilderString FETCH_TIME_IN_MILLIS = new XContentBuilderString("fetch_time_in_millis");
static final XContentBuilderString FETCH_CURRENT = new XContentBuilderString("fetch_current");
static final XContentBuilderString SCROLL_TOTAL = new XContentBuilderString("scroll_total");
static final XContentBuilderString SCROLL_TIME = new XContentBuilderString("scroll_time");
static final XContentBuilderString SCROLL_TIME_IN_MILLIS = new XContentBuilderString("scroll_time_in_millis");
static final XContentBuilderString SCROLL_CURRENT = new XContentBuilderString("scroll_current");
}

public static SearchStats readSearchStats(StreamInput in) throws IOException {
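`Stats` now tracks scroll contexts with the same three counters used for query and fetch, and `add(...)` folds them in when per-shard stats are rolled up. A hedged sketch of how the widened constructor composes (the numbers are illustrative; this is the nine-argument constructor introduced above):

SearchStats.Stats shard0 = new SearchStats.Stats(
        10, 120, 1,    // query:  count, time in millis, current
        10, 45, 0,     // fetch:  count, time in millis, current
        2, 30000, 1    // scroll: count, time in millis, current
);
SearchStats.Stats shard1 = new SearchStats.Stats(
        7, 80, 0,
        7, 30, 0,
        1, 5000, 0
);
shard0.add(shard1); // totals: 17 queries, 3 scrolls, 35000 ms of scroll time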
@@ -169,6 +169,15 @@ public final class ShardSearchStats {
openContexts.dec();
}

public void onNewScrollContext(SearchContext context) {
totalStats.scrollCurrent.inc();
}

public void onFreeScrollContext(SearchContext context) {
totalStats.scrollCurrent.dec();
totalStats.scrollMetric.inc(TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis() - context.nowInMillis()));
}

public void onRefreshSettings(Settings settings) {
slowLogSearchService.onRefreshSettings(settings);
}

@@ -176,21 +185,27 @@ public final class ShardSearchStats {
final static class StatsHolder {
public final MeanMetric queryMetric = new MeanMetric();
public final MeanMetric fetchMetric = new MeanMetric();
public final MeanMetric scrollMetric = new MeanMetric();
public final CounterMetric queryCurrent = new CounterMetric();
public final CounterMetric fetchCurrent = new CounterMetric();
public final CounterMetric scrollCurrent = new CounterMetric();

public SearchStats.Stats stats() {
return new SearchStats.Stats(queryMetric.count(), TimeUnit.NANOSECONDS.toMillis(queryMetric.sum()), queryCurrent.count(),
fetchMetric.count(), TimeUnit.NANOSECONDS.toMillis(fetchMetric.sum()), fetchCurrent.count());
return new SearchStats.Stats(
queryMetric.count(), TimeUnit.NANOSECONDS.toMillis(queryMetric.sum()), queryCurrent.count(),
fetchMetric.count(), TimeUnit.NANOSECONDS.toMillis(fetchMetric.sum()), fetchCurrent.count(),
scrollMetric.count(), TimeUnit.NANOSECONDS.toMillis(scrollMetric.sum()), scrollCurrent.count()
);
}

public long totalCurrent() {
return queryCurrent.count() + fetchCurrent.count();
return queryCurrent.count() + fetchCurrent.count() + scrollCurrent.count();
}

public void clear() {
queryMetric.clear();
fetchMetric.clear();
scrollMetric.clear();
}
}
}
@@ -71,6 +71,7 @@ public class IndexDynamicSettingsModule extends AbstractModule {
indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_BLOCKS_WRITE);
indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_BLOCKS_METADATA);
indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE);
indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_PRIORITY, Validator.NON_NEGATIVE_INTEGER);
indexDynamicSettings.addDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE);
indexDynamicSettings.addDynamicSetting(IndexShard.INDEX_REFRESH_INTERVAL, Validator.TIME);
indexDynamicSettings.addDynamicSetting(GatewayAllocator.INDEX_RECOVERY_INITIAL_SHARDS);
@@ -741,7 +741,11 @@ public class IndexShard extends AbstractIndexShardComponent {
}
}

public void failShard(String reason, Throwable e) {
/**
* Fails the shard and marks the shard store as corrupted if
* <code>e</code> is caused by index corruption
*/
public void failShard(String reason, @Nullable Throwable e) {
// fail the engine. This will cause this shard to also be removed from the node's index service.
engine().failEngine(reason, e);
}

@@ -1271,18 +1275,11 @@ public class IndexShard extends AbstractIndexShardComponent {
// called by the current engine
@Override
public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable failure) {
try {
// mark as corrupted, so opening the store will fail
store.markStoreCorrupted(new IOException("failed engine (reason: [" + reason + "])", failure));
} catch (IOException e) {
logger.warn("failed to mark shard store as corrupted", e);
} finally {
for (Engine.FailedEngineListener listener : delegates) {
try {
listener.onFailedEngine(shardId, reason, failure);
} catch (Exception e) {
logger.warn("exception while notifying engine failure", e);
}
for (Engine.FailedEngineListener listener : delegates) {
try {
listener.onFailedEngine(shardId, reason, failure);
} catch (Exception e) {
logger.warn("exception while notifying engine failure", e);
}
}
}
@@ -68,7 +68,8 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler {
// that case, fail the shard to reallocate a new IndexShard and
// create a new IndexWriter
logger.info("recovery failed for primary shadow shard, failing shard");
shard.failShard("primary relocation failed on shared filesystem", t);
// pass the failure as null, as we want to ensure the store is not marked as corrupted
shard.failShard("primary relocation failed on shared filesystem caused by: [" + t.getMessage() + "]", null);
} else {
logger.info("recovery failed on shared filesystem", t);
}
@@ -23,12 +23,8 @@ import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.fs.FsProbe;
import org.elasticsearch.monitor.fs.FsService;
import org.elasticsearch.monitor.fs.JmxFsProbe;
import org.elasticsearch.monitor.jvm.JvmMonitorService;
import org.elasticsearch.monitor.jvm.JvmService;
import org.elasticsearch.monitor.network.JmxNetworkProbe;
import org.elasticsearch.monitor.network.NetworkProbe;
import org.elasticsearch.monitor.network.NetworkService;
import org.elasticsearch.monitor.os.JmxOsProbe;
import org.elasticsearch.monitor.os.OsProbe;
import org.elasticsearch.monitor.os.OsService;

@@ -56,13 +52,11 @@ public class MonitorModule extends AbstractModule {
// bind default implementations
bind(ProcessProbe.class).to(JmxProcessProbe.class).asEagerSingleton();
bind(OsProbe.class).to(JmxOsProbe.class).asEagerSingleton();
bind(NetworkProbe.class).to(JmxNetworkProbe.class).asEagerSingleton();
bind(FsProbe.class).to(JmxFsProbe.class).asEagerSingleton();
bind(FsProbe.class).asEagerSingleton();

// bind other services
bind(ProcessService.class).asEagerSingleton();
bind(OsService.class).asEagerSingleton();
bind(NetworkService.class).asEagerSingleton();
bind(JvmService.class).asEagerSingleton();
bind(FsService.class).asEagerSingleton();
@@ -25,7 +25,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.fs.FsService;
import org.elasticsearch.monitor.jvm.JvmMonitorService;
import org.elasticsearch.monitor.jvm.JvmService;
import org.elasticsearch.monitor.network.NetworkService;
import org.elasticsearch.monitor.os.OsService;
import org.elasticsearch.monitor.process.ProcessService;

@@ -42,20 +41,17 @@ public class MonitorService extends AbstractLifecycleComponent<MonitorService> {

private final JvmService jvmService;

private final NetworkService networkService;

private final FsService fsService;

@Inject
public MonitorService(Settings settings, JvmMonitorService jvmMonitorService,
OsService osService, ProcessService processService, JvmService jvmService, NetworkService networkService,
OsService osService, ProcessService processService, JvmService jvmService,
FsService fsService) {
super(settings);
this.jvmMonitorService = jvmMonitorService;
this.osService = osService;
this.processService = processService;
this.jvmService = jvmService;
this.networkService = networkService;
this.fsService = fsService;
}

@@ -71,10 +67,6 @@ public class MonitorService extends AbstractLifecycleComponent<MonitorService> {
return this.jvmService;
}

public NetworkService networkService() {
return this.networkService;
}

public FsService fsService() {
return this.fsService;
}
@@ -21,7 +21,6 @@ package org.elasticsearch.monitor.fs;

import com.google.common.collect.Iterators;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;

@@ -35,54 +34,37 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

/**
*/
public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {

public static class Info implements Streamable, ToXContent {
public static class Path implements Streamable, ToXContent {

String path;
@Nullable
String mount;
@Nullable
String dev;
/** File system type from {@code java.nio.file.FileStore type()}, if available. */
@Nullable
String type;
long total = -1;
long free = -1;
long available = -1;
long diskReads = -1;
long diskWrites = -1;
long diskReadBytes = -1;
long diskWriteBytes = -1;
double diskQueue = -1;
double diskServiceTime = -1;

/** Uses Lucene's {@code IOUtils.spins} method to try to determine if the device backed by spinning media.
* This is null if we could not determine it, true if it possibly spins, else false. */
Boolean spins = null;

public Info() {
public Path() {
}

public Info(String path, @Nullable String mount, @Nullable String dev, long total, long free, long available, long diskReads,
long diskWrites, long diskReadBytes, long diskWriteBytes, double diskQueue, double diskServiceTime) {
public Path(String path, @Nullable String mount, long total, long free, long available) {
this.path = path;
this.mount = mount;
this.dev = dev;
this.total = total;
this.free = free;
this.available = available;
this.diskReads = diskReads;
this.diskWrites = diskWrites;
this.diskReadBytes = diskReadBytes;
this.diskWriteBytes = diskWriteBytes;
this.diskQueue = diskQueue;
this.diskServiceTime = diskServiceTime;
}

static public Info readInfoFrom(StreamInput in) throws IOException {
Info i = new Info();
static public Path readInfoFrom(StreamInput in) throws IOException {
Path i = new Path();
i.readFrom(in);
return i;
}

@@ -91,17 +73,10 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
public void readFrom(StreamInput in) throws IOException {
path = in.readOptionalString();
mount = in.readOptionalString();
dev = in.readOptionalString();
type = in.readOptionalString();
total = in.readLong();
free = in.readLong();
available = in.readLong();
diskReads = in.readLong();
diskWrites = in.readLong();
diskReadBytes = in.readLong();
diskWriteBytes = in.readLong();
diskQueue = in.readDouble();
diskServiceTime = in.readDouble();
spins = in.readOptionalBoolean();
}

@@ -109,17 +84,10 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(path); // total aggregates do not have a path
out.writeOptionalString(mount);
out.writeOptionalString(dev);
out.writeOptionalString(type);
out.writeLong(total);
out.writeLong(free);
out.writeLong(available);
out.writeLong(diskReads);
out.writeLong(diskWrites);
out.writeLong(diskReadBytes);
out.writeLong(diskWriteBytes);
out.writeDouble(diskQueue);
out.writeDouble(diskServiceTime);
out.writeOptionalBoolean(spins);
}

@@ -131,10 +99,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
return mount;
}

public String getDev() {
return dev;
}

public String getType() {
return type;
}

@@ -151,38 +115,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
return new ByteSizeValue(available);
}

public long getDiskReads() {
return this.diskReads;
}

public long getDiskWrites() {
return this.diskWrites;
}

public long getDiskReadSizeInBytes() {
return diskReadBytes;
}

public ByteSizeValue getDiskReadSizeSize() {
return new ByteSizeValue(diskReadBytes);
}

public long getDiskWriteSizeInBytes() {
return diskWriteBytes;
}

public ByteSizeValue getDiskWriteSizeSize() {
return new ByteSizeValue(diskWriteBytes);
}

public double getDiskQueue() {
return diskQueue;
}

public double getDiskServiceTime() {
return diskServiceTime;
}

public Boolean getSpins() {
return spins;
}

@@ -207,17 +139,11 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
return current + other;
}

public void add(Info info) {
total = addLong(total, info.total);
free = addLong(free, info.free);
available = addLong(available, info.available);
diskReads = addLong(diskReads, info.diskReads);
diskWrites = addLong(diskWrites, info.diskWrites);
diskReadBytes = addLong(diskReadBytes, info.diskReadBytes);
diskWriteBytes = addLong(diskWriteBytes, info.diskWriteBytes);
diskQueue = addDouble(diskQueue, info.diskQueue);
diskServiceTime = addDouble(diskServiceTime, info.diskServiceTime);
if (info.spins != null && info.spins.booleanValue()) {
public void add(Path path) {
total = addLong(total, path.total);
free = addLong(free, path.free);
available = addLong(available, path.available);
if (path.spins != null && path.spins.booleanValue()) {
// Spinning is contagious!
spins = Boolean.TRUE;
}

@@ -226,7 +152,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
static final class Fields {
static final XContentBuilderString PATH = new XContentBuilderString("path");
static final XContentBuilderString MOUNT = new XContentBuilderString("mount");
static final XContentBuilderString DEV = new XContentBuilderString("dev");
static final XContentBuilderString TYPE = new XContentBuilderString("type");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString TOTAL_IN_BYTES = new XContentBuilderString("total_in_bytes");

@@ -234,17 +159,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
static final XContentBuilderString FREE_IN_BYTES = new XContentBuilderString("free_in_bytes");
static final XContentBuilderString AVAILABLE = new XContentBuilderString("available");
static final XContentBuilderString AVAILABLE_IN_BYTES = new XContentBuilderString("available_in_bytes");
static final XContentBuilderString DISK_READS = new XContentBuilderString("disk_reads");
static final XContentBuilderString DISK_WRITES = new XContentBuilderString("disk_writes");
static final XContentBuilderString DISK_IO_OP = new XContentBuilderString("disk_io_op");
static final XContentBuilderString DISK_READ_SIZE = new XContentBuilderString("disk_read_size");
static final XContentBuilderString DISK_READ_SIZE_IN_BYTES = new XContentBuilderString("disk_read_size_in_bytes");
static final XContentBuilderString DISK_WRITE_SIZE = new XContentBuilderString("disk_write_size");
static final XContentBuilderString DISK_WRITE_SIZE_IN_BYTES = new XContentBuilderString("disk_write_size_in_bytes");
static final XContentBuilderString DISK_IO_SIZE = new XContentBuilderString("disk_io_size");
static final XContentBuilderString DISK_IO_IN_BYTES = new XContentBuilderString("disk_io_size_in_bytes");
static final XContentBuilderString DISK_QUEUE = new XContentBuilderString("disk_queue");
static final XContentBuilderString DISK_SERVICE_TIME = new XContentBuilderString("disk_service_time");
static final XContentBuilderString SPINS = new XContentBuilderString("spins");
}

@@ -257,9 +171,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
if (mount != null) {
builder.field(Fields.MOUNT, mount, XContentBuilder.FieldCaseConversion.NONE);
}
if (dev != null) {
builder.field(Fields.DEV, dev, XContentBuilder.FieldCaseConversion.NONE);
}
if (type != null) {
builder.field(Fields.TYPE, type, XContentBuilder.FieldCaseConversion.NONE);
}

@@ -273,51 +184,6 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
if (available != -1) {
builder.byteSizeField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, available);
}

long iop = -1;

if (diskReads != -1) {
iop = diskReads;
builder.field(Fields.DISK_READS, diskReads);
}
if (diskWrites != -1) {
if (iop != -1) {
iop += diskWrites;
} else {
iop = diskWrites;
}
builder.field(Fields.DISK_WRITES, diskWrites);
}

if (iop != -1) {
builder.field(Fields.DISK_IO_OP, iop);
}

long ioBytes = -1;

if (diskReadBytes != -1) {
ioBytes = diskReadBytes;
builder.byteSizeField(Fields.DISK_READ_SIZE_IN_BYTES, Fields.DISK_READ_SIZE, diskReadBytes);
}
if (diskWriteBytes != -1) {
if (ioBytes != -1) {
ioBytes += diskWriteBytes;
} else {
ioBytes = diskWriteBytes;
}
builder.byteSizeField(Fields.DISK_WRITE_SIZE_IN_BYTES, Fields.DISK_WRITE_SIZE, diskWriteBytes);
}

if (ioBytes != -1) {
builder.byteSizeField(Fields.DISK_IO_IN_BYTES, Fields.DISK_IO_SIZE, ioBytes);
}

if (diskQueue != -1) {
builder.field(Fields.DISK_QUEUE, Strings.format1Decimals(diskQueue, ""));
}
if (diskServiceTime != -1) {
builder.field(Fields.DISK_SERVICE_TIME, Strings.format1Decimals(diskServiceTime, ""));
}
if (spins != null) {
builder.field(Fields.SPINS, spins.toString());
}

@@ -328,36 +194,36 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
}

long timestamp;
Info total;
Info[] infos;
Path total;
Path[] paths;

FsStats() {
FsInfo() {

}

public FsStats(long timestamp, Info[] infos) {
public FsInfo(long timestamp, Path[] paths) {
this.timestamp = timestamp;
this.infos = infos;
this.paths = paths;
this.total = null;
}

public Info getTotal() {
public Path getTotal() {
return total();
}

public Info total() {
public Path total() {
if (total != null) {
return total;
}
Info res = new Info();
Set<String> seenDevices = new HashSet<>(infos.length);
for (Info subInfo : infos) {
if (subInfo.dev != null) {
if (!seenDevices.add(subInfo.dev)) {
Path res = new Path();
Set<String> seenDevices = new HashSet<>(paths.length);
for (Path subPath : paths) {
if (subPath.path != null) {
if (!seenDevices.add(subPath.path)) {
continue; // already added numbers for this device;
}
}
res.add(subInfo);
res.add(subPath);
}
total = res;
return res;

@@ -368,12 +234,12 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
}

@Override
public Iterator<Info> iterator() {
return Iterators.forArray(infos);
public Iterator<Path> iterator() {
return Iterators.forArray(paths);
}

public static FsStats readFsStats(StreamInput in) throws IOException {
FsStats stats = new FsStats();
public static FsInfo readFsInfo(StreamInput in) throws IOException {
FsInfo stats = new FsInfo();
stats.readFrom(in);
return stats;
}

@@ -381,18 +247,18 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
@Override
public void readFrom(StreamInput in) throws IOException {
timestamp = in.readVLong();
infos = new Info[in.readVInt()];
for (int i = 0; i < infos.length; i++) {
infos[i] = Info.readInfoFrom(in);
paths = new Path[in.readVInt()];
for (int i = 0; i < paths.length; i++) {
paths[i] = Path.readInfoFrom(in);
}
}

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(timestamp);
out.writeVInt(infos.length);
for (Info info : infos) {
info.writeTo(out);
out.writeVInt(paths.length);
for (Path path : paths) {
path.writeTo(out);
}
}

@@ -410,8 +276,8 @@ public class FsStats implements Iterable<FsStats.Info>, Streamable, ToXContent {
builder.field(Fields.TOTAL);
total().toXContent(builder, params);
builder.startArray(Fields.DATA);
for (Info info : infos) {
info.toXContent(builder, params);
for (Path path : paths) {
path.toXContent(builder, params);
}
builder.endArray();
builder.endObject();
FsProbe.java:

@@ -19,11 +19,49 @@
package org.elasticsearch.monitor.fs;

+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.NodeEnvironment;
+import org.elasticsearch.env.NodeEnvironment.NodePath;
+
import java.io.IOException;

/**
 */
-public interface FsProbe {
+public class FsProbe extends AbstractComponent {

-   FsStats stats() throws IOException;
+   private final NodeEnvironment nodeEnv;
+
+   @Inject
+   public FsProbe(Settings settings, NodeEnvironment nodeEnv) {
+       super(settings);
+       this.nodeEnv = nodeEnv;
+   }
+
+   public FsInfo stats() throws IOException {
+       if (!nodeEnv.hasNodeFile()) {
+           return new FsInfo(System.currentTimeMillis(), new FsInfo.Path[0]);
+       }
+       NodePath[] dataLocations = nodeEnv.nodePaths();
+       FsInfo.Path[] paths = new FsInfo.Path[dataLocations.length];
+       for (int i = 0; i < dataLocations.length; i++) {
+           paths[i] = getFSInfo(dataLocations[i]);
+       }
+       return new FsInfo(System.currentTimeMillis(), paths);
+   }
+
+   public static FsInfo.Path getFSInfo(NodePath nodePath) throws IOException {
+       FsInfo.Path fsPath = new FsInfo.Path();
+       fsPath.path = nodePath.path.toAbsolutePath().toString();
+
+       // NOTE: we use already cached (on node startup) FileStore and spins
+       // since recomputing these once per second (default) could be costly,
+       // and they should not change:
+       fsPath.total = nodePath.fileStore.getTotalSpace();
+       fsPath.free = nodePath.fileStore.getUnallocatedSpace();
+       fsPath.available = nodePath.fileStore.getUsableSpace();
+       fsPath.type = nodePath.fileStore.type();
+       fsPath.mount = nodePath.fileStore.toString();
+       fsPath.spins = nodePath.spins;
+       return fsPath;
+   }
}
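The probe reads its numbers straight off a java.nio.file.FileStore that the node environment caches at startup. A sketch of the same gathering against the plain JDK API, outside Elasticsearch; FsInfo.Path and NodePath are ES types, so only the underlying FileStore calls are shown:

    import java.io.IOException;
    import java.nio.file.FileStore;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    class FsProbeSketch {
        public static void main(String[] args) throws IOException {
            Path dataPath = Paths.get(".");
            FileStore store = Files.getFileStore(dataPath); // ES caches this at node startup
            System.out.println("path: " + dataPath.toAbsolutePath());
            System.out.println("total: " + store.getTotalSpace());
            System.out.println("free: " + store.getUnallocatedSpace());
            System.out.println("available: " + store.getUsableSpace());
            System.out.println("type: " + store.type());
            System.out.println("mount: " + store); // toString() includes the mount point
        }
    }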
FsService.java:

@@ -33,33 +33,33 @@ public class FsService extends AbstractComponent {

    private final FsProbe probe;

-   private final SingleObjectCache<FsStats> fsStatsCache;
+   private final SingleObjectCache<FsInfo> fsStatsCache;

    @Inject
    public FsService(Settings settings, FsProbe probe) throws IOException {
        super(settings);
        this.probe = probe;
        TimeValue refreshInterval = settings.getAsTime("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1));
-       fsStatsCache = new FsStatsCache(refreshInterval, probe.stats());
+       fsStatsCache = new FsInfoCache(refreshInterval, probe.stats());
        logger.debug("Using probe [{}] with refresh_interval [{}]", probe, refreshInterval);
    }

-   public FsStats stats() {
+   public FsInfo stats() {
        return fsStatsCache.getOrRefresh();
    }

-   private class FsStatsCache extends SingleObjectCache<FsStats> {
-       public FsStatsCache(TimeValue interval, FsStats initValue) {
+   private class FsInfoCache extends SingleObjectCache<FsInfo> {
+       public FsInfoCache(TimeValue interval, FsInfo initValue) {
            super(interval, initValue);
        }

        @Override
-       protected FsStats refresh() {
+       protected FsInfo refresh() {
            try {
                return probe.stats();
            } catch (IOException ex) {
                logger.warn("Failed to fetch fs stats - returning empty instance");
-               return new FsStats();
+               return new FsInfo();
            }
        }
    }
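FsInfoCache extends SingleObjectCache, so callers get a value at most refresh_interval old and the probe runs at most once per interval. A simplified stand-in for that pattern follows; the real SingleObjectCache is non-blocking, whereas this sketch takes a lock for brevity:

    import java.util.concurrent.TimeUnit;

    abstract class IntervalCache<T> {
        private final long intervalNanos;
        private volatile T cached;
        private volatile long lastRefreshNanos;

        IntervalCache(long intervalMillis, T initialValue) {
            this.intervalNanos = TimeUnit.MILLISECONDS.toNanos(intervalMillis);
            this.cached = initialValue;
            this.lastRefreshNanos = System.nanoTime();
        }

        protected abstract T refresh();

        synchronized T getOrRefresh() {
            long now = System.nanoTime();
            if (now - lastRefreshNanos > intervalNanos) {
                cached = refresh(); // e.g. probe.stats(); fall back to an empty value on IOException
                lastRefreshNanos = now;
            }
            return cached;
        }
    }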
JmxFsProbe.java (deleted):

@@ -1,68 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.fs;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.env.NodeEnvironment.NodePath;
-
-import java.io.IOException;
-
-public class JmxFsProbe extends AbstractComponent implements FsProbe {
-
-    private final NodeEnvironment nodeEnv;
-
-    @Inject
-    public JmxFsProbe(Settings settings, NodeEnvironment nodeEnv) {
-        super(settings);
-        this.nodeEnv = nodeEnv;
-    }
-
-    @Override
-    public FsStats stats() throws IOException {
-        if (!nodeEnv.hasNodeFile()) {
-            return new FsStats(System.currentTimeMillis(), new FsStats.Info[0]);
-        }
-        NodePath[] dataLocations = nodeEnv.nodePaths();
-        FsStats.Info[] infos = new FsStats.Info[dataLocations.length];
-        for (int i = 0; i < dataLocations.length; i++) {
-            infos[i] = getFSInfo(dataLocations[i]);
-        }
-        return new FsStats(System.currentTimeMillis(), infos);
-    }
-
-    public static FsStats.Info getFSInfo(NodePath nodePath) throws IOException {
-        FsStats.Info info = new FsStats.Info();
-        info.path = nodePath.path.toAbsolutePath().toString();
-
-        // NOTE: we use already cached (on node startup) FileStore and spins
-        // since recomputing these once per second (default) could be costly,
-        // and they should not change:
-        info.total = nodePath.fileStore.getTotalSpace();
-        info.free = nodePath.fileStore.getUnallocatedSpace();
-        info.available = nodePath.fileStore.getUsableSpace();
-        info.type = nodePath.fileStore.type();
-        info.mount = nodePath.fileStore.toString();
-        info.spins = nodePath.spins;
-        return info;
-    }
-}
NetworkInfo.java (deleted):

@@ -1,161 +0,0 @@
-/* ... Apache License, Version 2.0 header, identical to the one above ... */
-
-package org.elasticsearch.monitor.network;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class NetworkInfo implements Streamable, ToXContent {
-
-    public static final Interface NA_INTERFACE = new Interface();
-
-    long refreshInterval;
-
-    Interface primary = NA_INTERFACE;
-
-    public long refreshInterval() {
-        return this.refreshInterval;
-    }
-
-    public long getRefreshInterval() {
-        return this.refreshInterval;
-    }
-
-    public Interface primaryInterface() {
-        return primary;
-    }
-
-    public Interface getPrimaryInterface() {
-        return primaryInterface();
-    }
-
-    static final class Fields {
-        static final XContentBuilderString NETWORK = new XContentBuilderString("network");
-        static final XContentBuilderString REFRESH_INTERVAL = new XContentBuilderString("refresh_interval");
-        static final XContentBuilderString REFRESH_INTERVAL_IN_MILLIS = new XContentBuilderString("refresh_interval_in_millis");
-        static final XContentBuilderString PRIMARY_INTERFACE = new XContentBuilderString("primary_interface");
-        static final XContentBuilderString ADDRESS = new XContentBuilderString("address");
-        static final XContentBuilderString NAME = new XContentBuilderString("name");
-        static final XContentBuilderString MAC_ADDRESS = new XContentBuilderString("mac_address");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(Fields.NETWORK);
-        builder.timeValueField(Fields.REFRESH_INTERVAL_IN_MILLIS, Fields.REFRESH_INTERVAL, refreshInterval);
-        if (primary != NA_INTERFACE) {
-            builder.startObject(Fields.PRIMARY_INTERFACE);
-            builder.field(Fields.ADDRESS, primary.address());
-            builder.field(Fields.NAME, primary.name());
-            builder.field(Fields.MAC_ADDRESS, primary.macAddress());
-            builder.endObject();
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    public static NetworkInfo readNetworkInfo(StreamInput in) throws IOException {
-        NetworkInfo info = new NetworkInfo();
-        info.readFrom(in);
-        return info;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        refreshInterval = in.readLong();
-        primary = Interface.readNetworkInterface(in);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeLong(refreshInterval);
-        primary.writeTo(out);
-    }
-
-    public static class Interface implements Streamable {
-
-        private String name = "";
-        private String address = "";
-        private String macAddress = "";
-
-        private Interface() {
-        }
-
-        public Interface(String name, String address, String macAddress) {
-            this.name = name;
-            this.address = address;
-            this.macAddress = macAddress;
-        }
-
-        public String name() {
-            return name;
-        }
-
-        public String getName() {
-            return name();
-        }
-
-        public String address() {
-            return address;
-        }
-
-        public String getAddress() {
-            return address();
-        }
-
-        public String macAddress() {
-            return macAddress;
-        }
-
-        public String getMacAddress() {
-            return macAddress();
-        }
-
-        public static Interface readNetworkInterface(StreamInput in) throws IOException {
-            Interface inf = new Interface();
-            inf.readFrom(in);
-            return inf;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            name = in.readString();
-            address = in.readString();
-            macAddress = in.readString();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeString(name);
-            out.writeString(address);
-            out.writeString(macAddress);
-        }
-
-    }
-}
NetworkService.java (deleted):

@@ -1,130 +0,0 @@
-/* ... Apache License, Version 2.0 header, identical to the one above ... */
-
-package org.elasticsearch.monitor.network;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.SingleObjectCache;
-
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.util.Enumeration;
-
-/**
- *
- */
-public final class NetworkService extends AbstractComponent {
-
-    private final NetworkProbe probe;
-
-    private final NetworkInfo info;
-
-    private final SingleObjectCache<NetworkStats> networkStatsCache;
-
-    @Inject
-    public NetworkService(Settings settings, NetworkProbe probe) {
-        super(settings);
-        this.probe = probe;
-
-        TimeValue refreshInterval = settings.getAsTime("monitor.network.refresh_interval", TimeValue.timeValueSeconds(5));
-
-        logger.debug("Using probe [{}] with refresh_interval [{}]", probe, refreshInterval);
-
-        this.info = probe.networkInfo();
-        this.info.refreshInterval = refreshInterval.millis();
-        networkStatsCache = new NetworkStatsCache(refreshInterval, probe.networkStats());
-        if (logger.isDebugEnabled()) {
-            StringBuilder netDebug = new StringBuilder("net_info");
-            try {
-                Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
-                String hostName = InetAddress.getLocalHost().getHostName();
-                netDebug.append("\nhost [").append(hostName).append("]\n");
-                while (interfaces.hasMoreElements()) {
-                    NetworkInterface net = interfaces.nextElement();
-
-                    netDebug.append(net.getName()).append('\t').append("display_name [").append(net.getDisplayName()).append("]\n");
-                    Enumeration<InetAddress> addresses = net.getInetAddresses();
-                    netDebug.append("\t\taddress ");
-                    while (addresses.hasMoreElements()) {
-                        netDebug.append("[").append(addresses.nextElement()).append("] ");
-                    }
-                    netDebug.append('\n');
-                    netDebug.append("\t\tmtu [").append(net.getMTU()).append("] multicast [").append(net.supportsMulticast()).append("] ptp [").append(net.isPointToPoint())
-                            .append("] loopback [").append(net.isLoopback()).append("] up [").append(net.isUp()).append("] virtual [").append(net.isVirtual()).append("]")
-                            .append('\n');
-
-                    Enumeration<NetworkInterface> subInterfaces = net.getSubInterfaces();
-                    if (subInterfaces != null && subInterfaces.hasMoreElements()) {
-                        netDebug.append("\t\t\tsub interfaces:\n");
-
-                        while (subInterfaces.hasMoreElements()) {
-
-                            net = subInterfaces.nextElement();
-
-                            netDebug.append("\t\t\t").append(net.getName()).append("\t").append("display_name [").append(net.getDisplayName()).append("]\n");
-                            addresses = net.getInetAddresses();
-                            netDebug.append("\t\t\t\t\taddress ");
-                            while (addresses.hasMoreElements()) {
-                                netDebug.append("[").append(addresses.nextElement()).append("] ");
-                            }
-                            netDebug.append('\n');
-                            netDebug.append("\t\t\t\t\tmtu [").append(net.getMTU()).append("] multicast [").append(net.supportsMulticast()).append("] ptp [").append(net.isPointToPoint())
-                                    .append("] loopback [").append(net.isLoopback()).append("] up [").append(net.isUp()).append("] virtual [").append(net.isVirtual()).append("]")
-                                    .append('\n');
-                        }
-                    }
-                }
-            } catch (Exception ex) {
-                netDebug.append("failed to get Network Interface Info [" + ex.getMessage() + "]");
-            }
-            logger.debug(netDebug.toString());
-        }
-
-        if (logger.isTraceEnabled()) {
-            logger.trace("ifconfig\n\n" + ifconfig());
-        }
-        stats(); // pull the stats one time
-    }
-
-    public NetworkInfo info() {
-        return this.info;
-    }
-
-    public NetworkStats stats() {
-        return networkStatsCache.getOrRefresh();
-    }
-
-    private class NetworkStatsCache extends SingleObjectCache<NetworkStats> {
-        public NetworkStatsCache(TimeValue interval, NetworkStats initValue) {
-            super(interval, initValue);
-        }
-
-        @Override
-        protected NetworkStats refresh() {
-            return probe.networkStats();
-        }
-    }
-
-    public String ifconfig() {
-        return probe.ifconfig();
-    }
-}
NetworkStats.java (deleted):

@@ -1,248 +0,0 @@
-/* ... Apache License, Version 2.0 header, identical to the one above ... */
-
-package org.elasticsearch.monitor.network;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
-
-import java.io.IOException;
-
-/**
- *
- */
-public class NetworkStats implements Streamable, ToXContent {
-
-    long timestamp;
-
-    Tcp tcp = null;
-
-    NetworkStats() {
-
-    }
-
-    static final class Fields {
-        static final XContentBuilderString NETWORK = new XContentBuilderString("network");
-        static final XContentBuilderString TCP = new XContentBuilderString("tcp");
-        static final XContentBuilderString ACTIVE_OPENS = new XContentBuilderString("active_opens");
-        static final XContentBuilderString PASSIVE_OPENS = new XContentBuilderString("passive_opens");
-        static final XContentBuilderString CURR_ESTAB = new XContentBuilderString("curr_estab");
-        static final XContentBuilderString IN_SEGS = new XContentBuilderString("in_segs");
-        static final XContentBuilderString OUT_SEGS = new XContentBuilderString("out_segs");
-        static final XContentBuilderString RETRANS_SEGS = new XContentBuilderString("retrans_segs");
-        static final XContentBuilderString ESTAB_RESETS = new XContentBuilderString("estab_resets");
-        static final XContentBuilderString ATTEMPT_FAILS = new XContentBuilderString("attempt_fails");
-        static final XContentBuilderString IN_ERRS = new XContentBuilderString("in_errs");
-        static final XContentBuilderString OUT_RSTS = new XContentBuilderString("out_rsts");
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(Fields.NETWORK);
-        if (tcp != null) {
-            builder.startObject(Fields.TCP);
-            builder.field(Fields.ACTIVE_OPENS, tcp.getActiveOpens());
-            builder.field(Fields.PASSIVE_OPENS, tcp.getPassiveOpens());
-            builder.field(Fields.CURR_ESTAB, tcp.getCurrEstab());
-            builder.field(Fields.IN_SEGS, tcp.getInSegs());
-            builder.field(Fields.OUT_SEGS, tcp.getOutSegs());
-            builder.field(Fields.RETRANS_SEGS, tcp.getRetransSegs());
-            builder.field(Fields.ESTAB_RESETS, tcp.getEstabResets());
-            builder.field(Fields.ATTEMPT_FAILS, tcp.getAttemptFails());
-            builder.field(Fields.IN_ERRS, tcp.getInErrs());
-            builder.field(Fields.OUT_RSTS, tcp.getOutRsts());
-            builder.endObject();
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    public static NetworkStats readNetworkStats(StreamInput in) throws IOException {
-        NetworkStats stats = new NetworkStats();
-        stats.readFrom(in);
-        return stats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        timestamp = in.readVLong();
-        if (in.readBoolean()) {
-            tcp = Tcp.readNetworkTcp(in);
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVLong(timestamp);
-        if (tcp == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            tcp.writeTo(out);
-        }
-    }
-
-    public long timestamp() {
-        return timestamp;
-    }
-
-    public long getTimestamp() {
-        return timestamp();
-    }
-
-    public Tcp tcp() {
-        return tcp;
-    }
-
-    public Tcp getTcp() {
-        return tcp();
-    }
-
-    public static class Tcp implements Streamable {
-
-        long activeOpens;
-        long passiveOpens;
-        long attemptFails;
-        long estabResets;
-        long currEstab;
-        long inSegs;
-        long outSegs;
-        long retransSegs;
-        long inErrs;
-        long outRsts;
-
-        public static Tcp readNetworkTcp(StreamInput in) throws IOException {
-            Tcp tcp = new Tcp();
-            tcp.readFrom(in);
-            return tcp;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            activeOpens = in.readLong();
-            passiveOpens = in.readLong();
-            attemptFails = in.readLong();
-            estabResets = in.readLong();
-            currEstab = in.readLong();
-            inSegs = in.readLong();
-            outSegs = in.readLong();
-            retransSegs = in.readLong();
-            inErrs = in.readLong();
-            outRsts = in.readLong();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeLong(activeOpens);
-            out.writeLong(passiveOpens);
-            out.writeLong(attemptFails);
-            out.writeLong(estabResets);
-            out.writeLong(currEstab);
-            out.writeLong(inSegs);
-            out.writeLong(outSegs);
-            out.writeLong(retransSegs);
-            out.writeLong(inErrs);
-            out.writeLong(outRsts);
-        }
-
-        public long activeOpens() {
-            return this.activeOpens;
-        }
-
-        public long getActiveOpens() {
-            return activeOpens();
-        }
-
-        public long passiveOpens() {
-            return passiveOpens;
-        }
-
-        public long getPassiveOpens() {
-            return passiveOpens();
-        }
-
-        public long attemptFails() {
-            return attemptFails;
-        }
-
-        public long getAttemptFails() {
-            return attemptFails();
-        }
-
-        public long estabResets() {
-            return estabResets;
-        }
-
-        public long getEstabResets() {
-            return estabResets();
-        }
-
-        public long currEstab() {
-            return currEstab;
-        }
-
-        public long getCurrEstab() {
-            return currEstab();
-        }
-
-        public long inSegs() {
-            return inSegs;
-        }
-
-        public long getInSegs() {
-            return inSegs();
-        }
-
-        public long outSegs() {
-            return outSegs;
-        }
-
-        public long getOutSegs() {
-            return outSegs();
-        }
-
-        public long retransSegs() {
-            return retransSegs;
-        }
-
-        public long getRetransSegs() {
-            return retransSegs();
-        }
-
-        public long inErrs() {
-            return inErrs;
-        }
-
-        public long getInErrs() {
-            return inErrs();
-        }
-
-        public long outRsts() {
-            return outRsts;
-        }
-
-        public long getOutRsts() {
-            return outRsts();
-        }
-    }
-}
NodeService.java:

@@ -102,7 +102,6 @@ public class NodeService extends AbstractComponent {
                monitorService.processService().info(),
                monitorService.jvmService().info(),
                threadPool.info(),
-               monitorService.networkService().info(),
                transportService.info(),
                httpServer == null ? null : httpServer.info(),
                pluginService == null ? null : pluginService.info()

@@ -117,7 +116,6 @@ public class NodeService extends AbstractComponent {
                process ? monitorService.processService().info() : null,
                jvm ? monitorService.jvmService().info() : null,
                threadPool ? this.threadPool.info() : null,
-               network ? monitorService.networkService().info() : null,
                transport ? transportService.info() : null,
                http ? (httpServer == null ? null : httpServer.info()) : null,
                plugin ? (pluginService == null ? null : pluginService.info()) : null

@@ -133,7 +131,6 @@ public class NodeService extends AbstractComponent {
                monitorService.processService().stats(),
                monitorService.jvmService().stats(),
                threadPool.stats(),
-               monitorService.networkService().stats(),
                monitorService.fsService().stats(),
                transportService.stats(),
                httpServer == null ? null : httpServer.stats(),

@@ -151,7 +148,6 @@ public class NodeService extends AbstractComponent {
                process ? monitorService.processService().stats() : null,
                jvm ? monitorService.jvmService().stats() : null,
                threadPool ? this.threadPool.stats() : null,
-               network ? monitorService.networkService().stats() : null,
                fs ? monitorService.fsService().stats() : null,
                transport ? transportService.stats() : null,
                http ? (httpServer == null ? null : httpServer.stats()) : null,
PluginManager.java:

@@ -31,6 +31,7 @@
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.http.client.HttpDownloadHelper;
import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.logging.log4j.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;

@@ -72,7 +73,7 @@ public class PluginManager {
    // By default timeout is 0 which means no timeout
    public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMillis(0);

-   private static final ImmutableSet<Object> BLACKLIST = ImmutableSet.builder()
+   private static final ImmutableSet<String> BLACKLIST = ImmutableSet.<String>builder()
            .add("elasticsearch",
                    "elasticsearch.bat",
                    "elasticsearch.in.sh",

@@ -80,6 +81,21 @@ public class PluginManager {
                    "plugin.bat",
                    "service.bat").build();

+   private static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder()
+           .add(
+                   "elasticsearch-analysis-icu",
+                   "elasticsearch-analysis-kuromoji",
+                   "elasticsearch-analysis-phonetic",
+                   "elasticsearch-analysis-smartcn",
+                   "elasticsearch-analysis-stempel",
+                   "elasticsearch-cloud-aws",
+                   "elasticsearch-cloud-azure",
+                   "elasticsearch-cloud-gce",
+                   "elasticsearch-delete-by-query",
+                   "elasticsearch-lang-javascript",
+                   "elasticsearch-lang-python"
+           ).build();
+
    private final Environment environment;
    private String url;
    private OutputMode outputMode;

@@ -132,6 +148,10 @@ public class PluginManager {
                    // ignore
                    log("Failed: " + ExceptionsHelper.detailedMessage(e));
                }
+           } else {
+               if (PluginHandle.isOfficialPlugin(pluginHandle.repo, pluginHandle.user, pluginHandle.version)) {
+                   checkForOfficialPlugins(pluginHandle.name);
+               }
            }

            if (!downloaded) {

@@ -383,6 +403,15 @@ public class PluginManager {
        }
    }

+   protected static void checkForOfficialPlugins(String name) {
+       // We make sure that users can use only new short naming for official plugins only
+       if (!OFFICIAL_PLUGINS.contains(name)) {
+           throw new IllegalArgumentException(name +
+                   " is not an official plugin so you should install it using elasticsearch/" +
+                   name + "/latest naming form.");
+       }
+   }
+
    public Path[] getListInstalledPlugins() throws IOException {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
            return Iterators.toArray(stream.iterator(), Path.class);

@@ -408,6 +437,7 @@ public class PluginManager {

    public static void main(String[] args) {
        Tuple<Settings, Environment> initialSettings = InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true, Terminal.DEFAULT);
+       LogConfigurator.configure(initialSettings.v1());

        try {
            Files.createDirectories(initialSettings.v2().pluginsFile());

@@ -595,9 +625,15 @@ public class PluginManager {
        SysOut.println("    -h, --help                          : Prints this help message");
        SysOut.newline();
        SysOut.println(" [*] Plugin name could be:");
-       SysOut.println("     elasticsearch/plugin/version for official elasticsearch plugins (download from download.elasticsearch.org)");
+       SysOut.println("     elasticsearch-plugin-name    for Elasticsearch 2.0 Core plugin (download from download.elastic.co)");
+       SysOut.println("     elasticsearch/plugin/version for elasticsearch commercial plugins (download from download.elastic.co)");
        SysOut.println("     groupId/artifactId/version   for community plugins (download from maven central or oss sonatype)");
        SysOut.println("     username/repository          for site plugins (download from github master)");
        SysOut.newline();
+       SysOut.println("Elasticsearch Core plugins:");
+       for (String o : OFFICIAL_PLUGINS) {
+           SysOut.println(" - " + o);
+       }
+
        if (message != null) {
            SysOut.newline();

@@ -650,17 +686,26 @@ public class PluginManager {
        List<URL> urls() {
            List<URL> urls = new ArrayList<>();
            if (version != null) {
-               // Elasticsearch download service
-               addUrl(urls, "http://download.elasticsearch.org/" + user + "/" + repo + "/" + repo + "-" + version + ".zip");
-               // Maven central repository
-               addUrl(urls, "http://search.maven.org/remotecontent?filepath=" + user.replace('.', '/') + "/" + repo + "/" + version + "/" + repo + "-" + version + ".zip");
-               // Sonatype repository
-               addUrl(urls, "https://oss.sonatype.org/service/local/repositories/releases/content/" + user.replace('.', '/') + "/" + repo + "/" + version + "/" + repo + "-" + version + ".zip");
-               // Github repository
-               addUrl(urls, "https://github.com/" + user + "/" + repo + "/archive/" + version + ".zip");
+               // Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
+               if (user == null) {
+                   // TODO Update to https
+                   addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/org.elasticsearch.plugins/%1$s/%1$s-%2$s.zip", repo, version));
+               } else {
+                   // Elasticsearch old download service
+                   // TODO Update to https
+                   addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, repo, version));
+                   // Maven central repository
+                   addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
+                   // Sonatype repository
+                   addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
+                   // Github repository
+                   addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, repo, version));
+               }
            }
-           // Github repository for master branch (assume site)
-           addUrl(urls, "https://github.com/" + user + "/" + repo + "/archive/master.zip");
+           if (user != null) {
+               // Github repository for master branch (assume site)
+               addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, repo));
+           }
            return urls;
        }

@@ -706,6 +751,10 @@ public class PluginManager {
            }
        }

+       if (isOfficialPlugin(repo, user, version)) {
+           return new PluginHandle(repo, Version.CURRENT.number(), null, repo);
+       }
+
        if (repo.startsWith("elasticsearch-")) {
            // remove elasticsearch- prefix
            String endname = repo.substring("elasticsearch-".length());

@@ -720,6 +769,10 @@ public class PluginManager {

            return new PluginHandle(repo, version, user, repo);
        }

+       static boolean isOfficialPlugin(String repo, String user, String version) {
+           return version == null && user == null && !Strings.isNullOrEmpty(repo);
+       }
    }

}
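With the new short naming, a bare repo name with no user and no version is treated as an official plugin pinned to the current Elasticsearch version and fetched from the org.elasticsearch.plugins group on the download service. A standalone sketch of that resolution rule follows; the OFFICIAL set and the version string are illustrative stand-ins, not the real OFFICIAL_PLUGINS set or Version.CURRENT:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Locale;
    import java.util.Set;

    class PluginNameSketch {
        static final Set<String> OFFICIAL = new HashSet<>(
                Arrays.asList("elasticsearch-analysis-icu", "elasticsearch-cloud-aws"));
        static final String ES_VERSION = "2.0.0"; // stand-in for Version.CURRENT.number()

        static String resolveUrl(String repo, String user, String version) {
            if (version == null && user == null) { // the isOfficialPlugin() condition
                if (!OFFICIAL.contains(repo)) {
                    throw new IllegalArgumentException(repo
                            + " is not an official plugin so you should install it using elasticsearch/"
                            + repo + "/latest naming form.");
                }
                return String.format(Locale.ROOT,
                        "http://download.elastic.co/org.elasticsearch.plugins/%1$s/%1$s-%2$s.zip", repo, ES_VERSION);
            }
            // old user/repo/version form falls through to the legacy download service
            return String.format(Locale.ROOT,
                    "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, repo, version);
        }

        public static void main(String[] args) {
            System.out.println(resolveUrl("elasticsearch-analysis-icu", null, null));
            System.out.println(resolveUrl("repo", "someuser", "1.0"));
        }
    }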
RestNodesAction.java:

@@ -48,7 +48,7 @@ import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.suggest.stats.SuggestStats;
import org.elasticsearch.indices.NodeIndicesStats;
-import org.elasticsearch.monitor.fs.FsStats;
+import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.jvm.JvmStats;
import org.elasticsearch.monitor.os.OsInfo;

@@ -224,7 +224,7 @@ public class RestNodesAction extends AbstractCatAction {
        ProcessInfo processInfo = info == null ? null : info.getProcess();

        JvmStats jvmStats = stats == null ? null : stats.getJvm();
-       FsStats fsStats = stats == null ? null : stats.getFs();
+       FsInfo fsInfo = stats == null ? null : stats.getFs();
        OsStats osStats = stats == null ? null : stats.getOs();
        ProcessStats processStats = stats == null ? null : stats.getProcess();
        NodeIndicesStats indicesStats = stats == null ? null : stats.getIndices();

@@ -244,7 +244,7 @@ public class RestNodesAction extends AbstractCatAction {
        table.addCell(node.getVersion().number());
        table.addCell(info == null ? null : info.getBuild().hashShort());
        table.addCell(jvmInfo == null ? null : jvmInfo.version());
-       table.addCell(fsStats == null ? null : fsStats.getTotal().getAvailable());
+       table.addCell(fsInfo == null ? null : fsInfo.getTotal().getAvailable());
        table.addCell(jvmStats == null ? null : jvmStats.getMem().getHeapUsed());
        table.addCell(jvmStats == null ? null : jvmStats.getMem().getHeapUsedPercent());
        table.addCell(jvmInfo == null ? null : jvmInfo.getMem().getHeapMax());
ScriptService.java:

@@ -95,9 +95,9 @@ public class ScriptService extends AbstractComponent implements Closeable {
    private final ImmutableMap<String, ScriptEngineService> scriptEnginesByLang;
    private final ImmutableMap<String, ScriptEngineService> scriptEnginesByExt;

-   private final ConcurrentMap<CacheKey, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();
+   private final ConcurrentMap<String, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap();

-   private final Cache<CacheKey, CompiledScript> cache;
+   private final Cache<String, CompiledScript> cache;
    private final Path scriptsDirectory;

    private final ScriptModes scriptModes;

@@ -266,7 +266,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
        }

        ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang);
-       CacheKey cacheKey = newCacheKey(scriptEngineService, script.getScript());
+       String cacheKey = getCacheKey(scriptEngineService, script.getScript());

        if (script.getType() == ScriptType.FILE) {
            CompiledScript compiled = staticCache.get(cacheKey); //On disk scripts will be loaded into the staticCache by the listener

@@ -281,7 +281,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
        if (script.getType() == ScriptType.INDEXED) {
            final IndexedScript indexedScript = new IndexedScript(lang, script.getScript());
            code = getScriptFromIndex(indexedScript.lang, indexedScript.id);
-           cacheKey = newCacheKey(scriptEngineService, code);
+           cacheKey = getCacheKey(scriptEngineService, code);
        }

        CompiledScript compiled = cache.getIfPresent(cacheKey);

@@ -462,10 +462,10 @@ public class ScriptService extends AbstractComponent implements Closeable {
     * {@code ScriptEngineService}'s {@code scriptRemoved} method when the
     * script has been removed from the cache
     */
-   private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
+   private class ScriptCacheRemovalListener implements RemovalListener<String, CompiledScript> {

        @Override
-       public void onRemoval(RemovalNotification<CacheKey, CompiledScript> notification) {
+       public void onRemoval(RemovalNotification<String, CompiledScript> notification) {
            if (logger.isDebugEnabled()) {
                logger.debug("notifying script services of script removal due to: [{}]", notification.getCause());
            }

@@ -513,7 +513,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
            logger.info("compiling script file [{}]", file.toAbsolutePath());
            try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), Charsets.UTF_8)) {
                String script = Streams.copyToString(reader);
-               CacheKey cacheKey = newCacheKey(engineService, scriptNameExt.v1());
+               String cacheKey = getCacheKey(engineService, scriptNameExt.v1());
                staticCache.put(cacheKey, new CompiledScript(engineService.types()[0], engineService.compile(script)));
            }
        } else {

@@ -538,7 +538,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
            ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
            assert engineService != null;
            logger.info("removing script file [{}]", file.toAbsolutePath());
-           staticCache.remove(newCacheKey(engineService, scriptNameExt.v1()));
+           staticCache.remove(getCacheKey(engineService, scriptNameExt.v1()));
        }
    }

@@ -598,32 +598,9 @@ public class ScriptService extends AbstractComponent implements Closeable {
        }
    }

-   private static CacheKey newCacheKey(ScriptEngineService engineService, String script) {
-       return new CacheKey(engineService.types()[0], script);
-   }
-
-   private static class CacheKey {
-       public final String lang;
-       public final String script;
-
-       public CacheKey(String lang, String script) {
-           this.lang = lang;
-           this.script = script;
-       }
-
-       @Override
-       public boolean equals(Object o) {
-           if (! (o instanceof CacheKey)) {
-               return false;
-           }
-           CacheKey other = (CacheKey) o;
-           return lang.equals(other.lang) && script.equals(other.script);
-       }
-
-       @Override
-       public int hashCode() {
-           return lang.hashCode() + 31 * script.hashCode();
-       }
-   }
+   private static String getCacheKey(ScriptEngineService scriptEngineService, String script) {
+       String lang = scriptEngineService.types()[0];
+       return lang + ":" + script;
+   }

    private static class IndexedScript {
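The composite CacheKey(lang, script) object is replaced by a plain "lang:script" String. A minimal standalone sketch of the new scheme; note it implicitly assumes language names never contain ':', otherwise two distinct (lang, script) pairs could produce the same key:

    class CacheKeySketch {
        // Not the ScriptService API itself; just the key construction.
        static String getCacheKey(String lang, String script) {
            return lang + ":" + script; // e.g. "expression:doc['price'].value"
        }

        public static void main(String[] args) {
            System.out.println(getCacheKey("expression", "doc['price'].value"));
        }
    }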
SearchService.java:

@@ -267,22 +267,39 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {

    public ScrollQueryFetchSearchResult executeScan(InternalScrollSearchRequest request) {
        final SearchContext context = findContext(request.id());
+       ShardSearchStats shardSearchStats = context.indexShard().searchService();
        contextProcessing(context);
        try {
            processScroll(request, context);
-           if (context.searchType() == SearchType.QUERY_THEN_FETCH) {
-               // first scanning, reset the from to 0
-               context.searchType(SearchType.SCAN);
-               context.from(0);
+           shardSearchStats.onPreQueryPhase(context);
+           long time = System.nanoTime();
+           try {
+               if (context.searchType() == SearchType.QUERY_THEN_FETCH) {
+                   // first scanning, reset the from to 0
+                   context.searchType(SearchType.SCAN);
+                   context.from(0);
+               }
+               queryPhase.execute(context);
+           } catch (Throwable e) {
+               shardSearchStats.onFailedQueryPhase(context);
+               throw ExceptionsHelper.convertToRuntime(e);
            }
-           queryPhase.execute(context);
-           shortcutDocIdsToLoadForScanning(context);
-           fetchPhase.execute(context);
-           if (context.scroll() == null || context.fetchResult().hits().hits().length < context.size()) {
-               freeContext(request.id());
-           } else {
-               contextProcessedSuccessfully(context);
+           long queryFinishTime = System.nanoTime();
+           shardSearchStats.onQueryPhase(context, queryFinishTime - time);
+           shardSearchStats.onPreFetchPhase(context);
+           try {
+               shortcutDocIdsToLoadForScanning(context);
+               fetchPhase.execute(context);
+               if (context.scroll() == null || context.fetchResult().hits().hits().length < context.size()) {
+                   freeContext(request.id());
+               } else {
+                   contextProcessedSuccessfully(context);
+               }
+           } catch (Throwable e) {
+               shardSearchStats.onFailedFetchPhase(context);
+               throw ExceptionsHelper.convertToRuntime(e);
            }
+           shardSearchStats.onFetchPhase(context, System.nanoTime() - queryFinishTime);
            return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
        } catch (Throwable e) {
            logger.trace("Scan phase failed", e);

@@ -569,6 +586,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        boolean success = false;
        try {
            putContext(context);
+           if (request.scroll() != null) {
+               context.indexShard().searchService().onNewScrollContext(context);
+           }
            context.indexShard().searchService().onNewContext(context);
            success = true;
            return context;

@@ -643,6 +663,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        if (context != null) {
            try {
                context.indexShard().searchService().onFreeContext(context);
+               if (context.scroll() != null) {
+                   context.indexShard().searchService().onFreeScrollContext(context);
+               }
            } finally {
                context.close();
            }
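The executeScan() rewrite brackets each phase with its stats callbacks: onPre* before, a timed on* on success, and onFailed* on exception. A generic sketch of that bracketing pattern; PhaseStats here is a stand-in, not the ShardSearchStats interface:

    import java.util.concurrent.Callable;

    class PhaseTimingSketch {
        interface PhaseStats {
            void onPrePhase();
            void onPhase(long tookNanos);
            void onFailedPhase();
        }

        // Runs a phase, reporting either a timing or a failure, mirroring the
        // query/fetch bracketing added above.
        static <T> T timed(PhaseStats stats, Callable<T> phase) throws Exception {
            stats.onPrePhase();
            long start = System.nanoTime();
            T result;
            try {
                result = phase.call();
            } catch (Throwable t) {
                stats.onFailedPhase();
                throw t; // Java 7 precise rethrow keeps the throws clause at Exception
            }
            stats.onPhase(System.nanoTime() - start);
            return result;
        }
    }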
AggregationModule.java:

@@ -64,6 +64,7 @@
import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumParser;
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptParser;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser;
+import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelModule;

@@ -118,6 +119,7 @@ public class AggregationModule extends AbstractModule implements SpawnModules{
        pipelineAggParsers.add(MovAvgParser.class);
        pipelineAggParsers.add(CumulativeSumParser.class);
        pipelineAggParsers.add(BucketScriptParser.class);
+       pipelineAggParsers.add(BucketSelectorParser.class);
    }

    /**
TransportAggregationModule.java:

@@ -69,6 +69,7 @@
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivative;
+import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.TransportMovAvgModelModule;

@@ -131,6 +132,7 @@ public class TransportAggregationModule extends AbstractModule implements SpawnModules {
        MovAvgPipelineAggregator.registerStreams();
        CumulativeSumPipelineAggregator.registerStreams();
        BucketScriptPipelineAggregator.registerStreams();
+       BucketSelectorPipelineAggregator.registerStreams();
    }

    @Override
PipelineAggregatorBuilders.java:

@@ -26,6 +26,7 @@
import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder;
+import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder;

public final class PipelineAggregatorBuilders {

@@ -61,6 +62,10 @@ public final class PipelineAggregatorBuilders {
        return new BucketScriptBuilder(name);
    }

+   public static final BucketSelectorBuilder having(String name) {
+       return new BucketSelectorBuilder(name);
+   }
+
    public static final CumulativeSumBuilder cumulativeSum(String name) {
        return new CumulativeSumBuilder(name);
    }
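A hedged usage sketch for the new having() factory, wiring a bucket_selector through the builder added below in this commit; the script source and bucket path are illustrative, and this assumes the single-argument Script(String) constructor:

    import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.having;

    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder;

    import java.util.Collections;

    class HavingUsageSketch {
        // Keep only buckets whose sales_sum metric exceeds 1000.
        static BucketSelectorBuilder salesFilter() {
            return having("sales_filter")
                    .setBucketsPathsMap(Collections.singletonMap("totalSales", "sales_sum"))
                    .script(new Script("totalSales > 1000"));
        }
    }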
BucketSelectorBuilder.java (new file):

@@ -0,0 +1,76 @@
+/* ... Apache License, Version 2.0 header, identical to the one above ... */
+
+package org.elasticsearch.search.aggregations.pipeline.having;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.Script.ScriptField;
+import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class BucketSelectorBuilder extends PipelineAggregatorBuilder<BucketSelectorBuilder> {
+
+    private GapPolicy gapPolicy;
+    private Script script;
+    private Map<String, String> bucketsPathsMap;
+
+    public BucketSelectorBuilder(String name) {
+        super(name, BucketSelectorPipelineAggregator.TYPE.name());
+    }
+
+    public BucketSelectorBuilder script(Script script) {
+        this.script = script;
+        return this;
+    }
+
+    public BucketSelectorBuilder gapPolicy(GapPolicy gapPolicy) {
+        this.gapPolicy = gapPolicy;
+        return this;
+    }
+
+    /**
+     * Sets the paths to the buckets to use for this pipeline aggregator. The
+     * map given to this method must map script variable names (keys) to the
+     * bucket paths (values) of the metrics to use for each variable.
+     */
+    public BucketSelectorBuilder setBucketsPathsMap(Map<String, String> bucketsPathsMap) {
+        this.bucketsPathsMap = bucketsPathsMap;
+        return this;
+    }
+
+    @Override
+    protected XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException {
+        if (script != null) {
+            builder.field(ScriptField.SCRIPT.getPreferredName(), script);
+        }
+        if (gapPolicy != null) {
+            builder.field(BucketSelectorParser.GAP_POLICY.getPreferredName(), gapPolicy.getName());
+        }
+        if (bucketsPathsMap != null) {
+            builder.field(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName(), bucketsPathsMap);
+        }
+        return builder;
+    }
+
+}
BucketSelectorParser.java (new file):

@@ -0,0 +1,119 @@
+/* ... Apache License, Version 2.0 header, identical to the one above ... */
+
+package org.elasticsearch.search.aggregations.pipeline.having;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.Script.ScriptField;
+import org.elasticsearch.search.SearchParseException;
+import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class BucketSelectorParser implements PipelineAggregator.Parser {
+
+    public static final ParseField FORMAT = new ParseField("format");
+    public static final ParseField GAP_POLICY = new ParseField("gap_policy");
+    public static final ParseField PARAMS_FIELD = new ParseField("params");
+
+    @Override
+    public String type() {
+        return BucketSelectorPipelineAggregator.TYPE.name();
+    }
+
+    @Override
+    public PipelineAggregatorFactory parse(String reducerName, XContentParser parser, SearchContext context) throws IOException {
+        XContentParser.Token token;
+        Script script = null;
+        String currentFieldName = null;
+        Map<String, String> bucketsPathsMap = null;
+        GapPolicy gapPolicy = GapPolicy.SKIP;
+
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if (token == XContentParser.Token.VALUE_STRING) {
+                if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
+                    bucketsPathsMap = new HashMap<>();
+                    bucketsPathsMap.put("_value", parser.text());
+                } else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
+                    gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
+                } else if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
+                    script = Script.parse(parser, context.parseFieldMatcher());
+                } else {
+                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+                            + currentFieldName + "].", parser.getTokenLocation());
+                }
+            } else if (token == XContentParser.Token.START_ARRAY) {
+                if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
+                    List<String> paths = new ArrayList<>();
+                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                        String path = parser.text();
+                        paths.add(path);
+                    }
+                    bucketsPathsMap = new HashMap<>();
+                    for (int i = 0; i < paths.size(); i++) {
+                        bucketsPathsMap.put("_value" + i, paths.get(i));
+                    }
+                } else {
+                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+                            + currentFieldName + "].", parser.getTokenLocation());
+                }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
+                    script = Script.parse(parser, context.parseFieldMatcher());
+                } else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
+                    Map<String, Object> map = parser.map();
+                    bucketsPathsMap = new HashMap<>();
+                    for (Map.Entry<String, Object> entry : map.entrySet()) {
+                        bucketsPathsMap.put(entry.getKey(), String.valueOf(entry.getValue()));
+                    }
+                } else {
+                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+                            + currentFieldName + "].", parser.getTokenLocation());
+                }
+            } else {
+                throw new SearchParseException(context, "Unexpected token " + token + " in [" + reducerName + "].",
+                        parser.getTokenLocation());
+            }
+        }
+
+        if (bucketsPathsMap == null) {
+            throw new SearchParseException(context, "Missing required field [" + BUCKETS_PATH.getPreferredName()
+                    + "] for bucket_selector aggregation [" + reducerName + "]", parser.getTokenLocation());
+        }
+
+        if (script == null) {
+            throw new SearchParseException(context, "Missing required field [" + ScriptField.SCRIPT.getPreferredName()
+                    + "] for bucket_selector aggregation [" + reducerName + "]", parser.getTokenLocation());
+        }
+
+        return new BucketSelectorPipelineAggregator.Factory(reducerName, bucketsPathsMap, script, gapPolicy);
+    }
+
+}
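The parser accepts buckets_path in three shapes, each normalized to a Map of script-variable name to bucket path. A standalone sketch of that normalization, mirroring the three branches above; the path names are illustrative:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class BucketsPathShapes {
        public static void main(String[] args) {
            // "buckets_path": "sales_sum"            -> {_value=sales_sum}
            Map<String, String> single = new HashMap<>();
            single.put("_value", "sales_sum");

            // "buckets_path": ["a_sum", "b_sum"]     -> {_value0=a_sum, _value1=b_sum}
            List<String> paths = Arrays.asList("a_sum", "b_sum");
            Map<String, String> fromArray = new HashMap<>();
            for (int i = 0; i < paths.size(); i++) {
                fromArray.put("_value" + i, paths.get(i));
            }

            // "buckets_path": {"total": "sales_sum"} -> {total=sales_sum}
            Map<String, String> named = new HashMap<>();
            named.put("total", "sales_sum");

            System.out.println(single + " " + fromArray + " " + named);
        }
    }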
@ -0,0 +1,164 @@
|
|||
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline.having;

import com.google.common.base.Function;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.expression.ExpressionScriptEngineService;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resolveBucketValue;

public class BucketSelectorPipelineAggregator extends PipelineAggregator {

    public final static Type TYPE = new Type("bucket_selector");

    public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
        @Override
        public BucketSelectorPipelineAggregator readResult(StreamInput in) throws IOException {
            BucketSelectorPipelineAggregator result = new BucketSelectorPipelineAggregator();
            result.readFrom(in);
            return result;
        }
    };

    public static void registerStreams() {
        PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
    }

    private static final Function<Aggregation, InternalAggregation> FUNCTION = new Function<Aggregation, InternalAggregation>() {
        @Override
        public InternalAggregation apply(Aggregation input) {
            return (InternalAggregation) input;
        }
    };

    private GapPolicy gapPolicy;

    private Script script;

    private Map<String, String> bucketsPathsMap;

    public BucketSelectorPipelineAggregator() {
    }

    public BucketSelectorPipelineAggregator(String name, Map<String, String> bucketsPathsMap, Script script, GapPolicy gapPolicy,
            Map<String, Object> metadata) {
        super(name, bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]), metadata);
        this.bucketsPathsMap = bucketsPathsMap;
        this.script = script;
        this.gapPolicy = gapPolicy;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
        List<? extends Bucket> buckets = originalAgg.getBuckets();

        CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS);
        List newBuckets = new ArrayList<>();
        for (Bucket bucket : buckets) {
            Map<String, Object> vars = new HashMap<>();
            if (script.getParams() != null) {
                vars.putAll(script.getParams());
            }
            for (Map.Entry<String, String> entry : bucketsPathsMap.entrySet()) {
                String varName = entry.getKey();
                String bucketsPath = entry.getValue();
                Double value = resolveBucketValue(originalAgg, bucket, bucketsPath, gapPolicy);
                vars.put(varName, value);
            }
            ExecutableScript executableScript = reduceContext.scriptService().executable(compiledScript, vars);
            Object scriptReturnValue = executableScript.run();
            final boolean keepBucket;
            if (ExpressionScriptEngineService.NAME.equals(script.getLang())) {
                double scriptDoubleValue = (double) scriptReturnValue;
                keepBucket = scriptDoubleValue == 1.0;
            } else {
                keepBucket = (boolean) scriptReturnValue;
            }
            if (keepBucket) {
                newBuckets.add(bucket);
            }
        }
        return originalAgg.create(newBuckets);
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        script.writeTo(out);
        gapPolicy.writeTo(out);
        out.writeGenericValue(bucketsPathsMap);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected void doReadFrom(StreamInput in) throws IOException {
        script = Script.readScript(in);
        gapPolicy = GapPolicy.readFrom(in);
        bucketsPathsMap = (Map<String, String>) in.readGenericValue();
    }

    public static class Factory extends PipelineAggregatorFactory {

        private Script script;
        private GapPolicy gapPolicy;
        private Map<String, String> bucketsPathsMap;

        public Factory(String name, Map<String, String> bucketsPathsMap, Script script, GapPolicy gapPolicy) {
            super(name, TYPE.name(), bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]));
            this.bucketsPathsMap = bucketsPathsMap;
            this.script = script;
            this.gapPolicy = gapPolicy;
        }

        @Override
        protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
            return new BucketSelectorPipelineAggregator(name, bucketsPathsMap, script, gapPolicy, metaData);
        }
    }

}
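One subtlety in reduce() above: most script languages are expected to return a boolean, but expression scripts can only return doubles, so the aggregator keeps a bucket only when the expression evaluates to exactly 1.0. A minimal standalone restatement of that decision, as a hypothetical helper rather than anything in the class:

    // Hypothetical helper mirroring the keep/drop decision in reduce() above.
    static boolean keepBucket(Object scriptReturnValue, boolean isExpressionLang) {
        if (isExpressionLang) {
            // expression scripts can only yield doubles; exactly 1.0 means "keep"
            return ((Double) scriptReturnValue) == 1.0;
        }
        // any other script language must return a boolean
        return (Boolean) scriptReturnValue;
    }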
@@ -38,7 +38,7 @@ import com.google.common.collect.ImmutableList;
 */
public class SnapshotInfo implements ToXContent, Streamable {

-    private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime");
+    private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime");

    private String name;

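The formatter swap above tightens snapshot date parsing: dateOptionalTime accepts lenient input such as single-digit months and days, while strictDateOptionalTime requires fully padded ISO fields. A hedged sketch using the same Joda helper; the sample dates are illustrative:

    // Assumes the Joda/FormatDateTimeFormatter types referenced in the diff above.
    FormatDateTimeFormatter strict = Joda.forPattern("strictDateOptionalTime");
    strict.parser().parseMillis("2015-06-09T12:00:00"); // parses: fully padded ISO date-time
    // strict.parser().parseMillis("2015-6-9");         // would throw: strict rejects unpadded fields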
File diff suppressed because it is too large
@@ -47,6 +47,11 @@ grant codeBase "${es.security.jar.elasticsearch.securemock}" {
    permission java.lang.RuntimePermission "reflectionFactoryAccess";
};

grant codeBase "${es.security.jar.bouncycastle.bcprov}" {
    // needed to allow installation of bouncycastle crypto provider
    permission java.security.SecurityPermission "putProviderProperty.BC";
};

//// Everything else:

grant {

@@ -75,7 +75,10 @@ public class NamingConventionTests extends ElasticsearchTestCase {
                if (filename.endsWith(".class")) {
                    Class<?> clazz = loadClass(filename);
                    if (Modifier.isAbstract(clazz.getModifiers()) == false && Modifier.isInterface(clazz.getModifiers()) == false) {
-                        if ((clazz.getName().endsWith("Tests") || clazz.getName().endsWith("Test"))) { // don't worry about the ones that match the pattern
+                        if (clazz.getName().endsWith("Tests") ||
+                                clazz.getName().endsWith("IT") ||
+                                clazz.getName().endsWith("Test")) { // don't worry about the ones that match the pattern

                            if (isTestCase(clazz) == false) {
                                notImplementing.add(clazz);
                            }

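The widened condition above means classes whose names end in IT are now also held to the naming-convention check, alongside the Tests and Test suffixes. Restated as a standalone predicate, a hypothetical helper for illustration only:

    // Hypothetical helper equivalent to the suffix check in the diff above.
    static boolean looksLikeTestClass(Class<?> clazz) {
        String name = clazz.getName();
        return name.endsWith("Tests") || name.endsWith("IT") || name.endsWith("Test");
    }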
@@ -37,7 +37,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.engine.VersionConflictEngineException;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.FieldMapper;
import org.hamcrest.Matcher;
import org.junit.Test;

@@ -273,7 +273,7 @@ public class GetTermVectorsTests extends AbstractTermVectorsTests {
        ft.setStoreTermVectorPayloads(storePayloads);
        ft.setStoreTermVectorPositions(storePositions);

-        String optionString = AbstractFieldMapper.termVectorOptionsToString(ft);
+        String optionString = FieldMapper.termVectorOptionsToString(ft);
        XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                        .startObject("field")

@@ -37,8 +37,8 @@ import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
@@ -266,7 +266,7 @@ public class TermVectorsUnitTests extends ElasticsearchTestCase {
        ft.setStoreTermVectorPayloads(true);
        ft.setStoreTermVectors(true);
        ft.setStoreTermVectorPositions(true);
-        String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
+        String ftOpts = FieldMapper.termVectorOptionsToString(ft);
        assertThat("with_positions_payloads", equalTo(ftOpts));
        AllFieldMapper.Builder builder = new AllFieldMapper.Builder(null);
        boolean exceptiontrown = false;
@@ -285,7 +285,7 @@ public class TermVectorsUnitTests extends ElasticsearchTestCase {
        ft.setStoreTermVectorPayloads(true);
        ft.setStoreTermVectors(true);
        ft.setStoreTermVectorPositions(false);
-        String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
+        String ftOpts = FieldMapper.termVectorOptionsToString(ft);
        assertThat(ftOpts, equalTo("with_offsets"));
    }

@@ -19,8 +19,13 @@

package org.elasticsearch.action.update;

import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ElasticsearchTestCase;
@@ -28,9 +33,10 @@ import org.junit.Test;

import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

public class UpdateRequestTests extends ElasticsearchTestCase {

@@ -119,4 +125,45 @@ public class UpdateRequestTests extends ElasticsearchTestCase {
        assertThat(doc.get("field1").toString(), equalTo("value1"));
        assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
    }

    @Test // Related to issue 3256
    public void testUpdateRequestWithTTL() throws Exception {
        long providedTTLValue = randomIntBetween(500, 1000);
        Settings settings = settings(Version.CURRENT).build();

        UpdateHelper updateHelper = new UpdateHelper(settings, null);

        // We just upsert one document with ttl
        IndexRequest indexRequest = new IndexRequest("test", "type1", "1")
                .source(jsonBuilder().startObject().field("foo", "bar").endObject())
                .ttl(providedTTLValue);
        UpdateRequest updateRequest = new UpdateRequest("test", "type1", "1")
                .doc(jsonBuilder().startObject().field("fooz", "baz").endObject())
                .upsert(indexRequest);

        // We simulate that the document is not existing yet
        GetResult getResult = new GetResult("test", "type1", "1", 0, false, null, null);
        UpdateHelper.Result result = updateHelper.prepare(updateRequest, getResult);
        Streamable action = result.action();
        assertThat(action, instanceOf(IndexRequest.class));
        IndexRequest indexAction = (IndexRequest) action;
        assertThat(indexAction.ttl(), is(providedTTLValue));

        // We just upsert one document with ttl using a script
        indexRequest = new IndexRequest("test", "type1", "2")
                .source(jsonBuilder().startObject().field("foo", "bar").endObject())
                .ttl(providedTTLValue);
        updateRequest = new UpdateRequest("test", "type1", "2")
                .upsert(indexRequest)
                .script(new Script(";"))
                .scriptedUpsert(true);

        // We simulate that the document is not existing yet
        getResult = new GetResult("test", "type1", "2", 0, false, null, null);
        result = updateHelper.prepare(updateRequest, getResult);
        action = result.action();
        assertThat(action, instanceOf(IndexRequest.class));
        indexAction = (IndexRequest) action;
        assertThat(indexAction.ttl(), is(providedTTLValue));
    }
}
Some files were not shown because too many files have changed in this diff