Merge branch 'master' into feature/ingest
This commit is contained in: commit 8240031216
@@ -184,6 +184,12 @@ tasks.idea.doLast {
 if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) {
   throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ')
 }
+// add buildSrc itself as a groovy project
+task buildSrcIdea(type: GradleBuild) {
+  buildFile = 'buildSrc/build.gradle'
+  tasks = ['cleanIdea', 'ideaModule']
+}
+tasks.idea.dependsOn(buildSrcIdea)
 
 
 // eclipse configuration
@@ -525,7 +525,7 @@ class ClusterFormationTasks {
         }
     }
 
-    static String pluginTaskName(String action, String name, String suffix) {
+    public static String pluginTaskName(String action, String name, String suffix) {
         // replace every dash followed by a character with just the uppercase character
         String camelName = name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) }
        return action + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix
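
Note: pluginTaskName (made public here so MessyTestPlugin below can call it) turns a dashed project name into a camel-cased task name, e.g. copy + analysis-icu + Metadata -> copyAnalysisIcuMetadata. A minimal Java sketch of the same transformation, illustrative only and not part of this commit:

    import java.util.Locale;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class PluginTaskNames {
        static String pluginTaskName(String action, String name, String suffix) {
            // replace every dash followed by a character with the uppercase character
            Matcher m = Pattern.compile("-(\\w)").matcher(name);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT));
            }
            m.appendTail(sb);
            String camelName = sb.toString();
            return action + camelName.substring(0, 1).toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix;
        }

        public static void main(String[] args) {
            System.out.println(pluginTaskName("copy", "analysis-icu", "Metadata")); // copyAnalysisIcuMetadata
        }
    }
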
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.test
+
+import org.elasticsearch.gradle.plugin.PluginBuildPlugin
+import org.gradle.api.Project
+import org.gradle.api.artifacts.Dependency
+import org.gradle.api.artifacts.ProjectDependency
+import org.gradle.api.tasks.Copy
+
+/**
+ * A plugin to run messy tests, which are generally tests that depend on plugins.
+ *
+ * This plugin will add the same test configuration as standalone tests, except
+ * also add the plugin-metadata and properties files for each plugin project
+ * dependency.
+ */
+class MessyTestPlugin extends StandaloneTestPlugin {
+    @Override
+    public void apply(Project project) {
+        super.apply(project)
+
+        project.configurations.testCompile.dependencies.all { Dependency dep ->
+            // this closure is run every time a compile dependency is added
+            if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) {
+                project.gradle.projectsEvaluated {
+                    addPluginResources(project, dep.dependencyProject)
+                }
+            }
+        }
+    }
+
+    private static addPluginResources(Project project, Project pluginProject) {
+        String outputDir = "generated-resources/${pluginProject.name}"
+        String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
+        Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
+        copyPluginMetadata.into(outputDir)
+        copyPluginMetadata.from(pluginProject.tasks.pluginProperties)
+        copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata'))
+        project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
+
+        // add each generated dir to the test classpath in IDEs
+        //project.eclipse.classpath.sourceSets = [project.sourceSets.test]
+        project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]]
+    }
+}
@@ -46,6 +46,8 @@ public class StandaloneTestBasePlugin implements Plugin<Project> {
 
         project.eclipse.classpath.sourceSets = [project.sourceSets.test]
         project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime]
+        project.idea.module.testSourceDirs += project.sourceSets.test.java.srcDirs
+        project.idea.module.scopes['TEST'] = [plus: [project.configurations.testRuntime]]
 
         PrecommitTasks.create(project, false)
         project.check.dependsOn(project.precommit)
@@ -0,0 +1,20 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin
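
Note: the properties descriptor above is how Gradle resolves a plugin id to its implementation class: the file's name under META-INF/gradle-plugins (not shown on this page) supplies the plugin id, and implementation-class names the class to instantiate. A rough Java sketch of that lookup, with a hypothetical descriptor path:

    import java.io.InputStream;
    import java.util.Properties;

    class PluginDescriptorLookup {
        // Hypothetical path; the actual descriptor file name is not shown in this diff.
        static final String DESCRIPTOR = "META-INF/gradle-plugins/elasticsearch.messy-test.properties";

        static Class<?> loadPluginClass(ClassLoader loader) throws Exception {
            Properties props = new Properties();
            try (InputStream in = loader.getResourceAsStream(DESCRIPTOR)) {
                if (in == null) {
                    throw new IllegalStateException("descriptor not found: " + DESCRIPTOR);
                }
                // reads implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin
                props.load(in);
            }
            return Class.forName(props.getProperty("implementation-class"), true, loader);
        }
    }
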
@@ -25,8 +25,10 @@ import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.RoutingMissingException;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.delete.TransportDeleteAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.index.TransportIndexAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.replication.TransportReplicationAction;
 import org.elasticsearch.action.update.UpdateHelper;
@@ -49,8 +51,6 @@ import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
-import org.elasticsearch.index.mapper.Mapping;
-import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.translog.Translog;
@@ -164,7 +164,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
 
                 try {
                     // add the response
-                    final WriteResult<DeleteResponse> writeResult = shardDeleteOperation(request, deleteRequest, indexShard);
+                    final WriteResult<DeleteResponse> writeResult = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard);
                     DeleteResponse deleteResponse = writeResult.response();
                     location = locationToSync(location, writeResult.location);
                     setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE, deleteResponse));
@@ -304,7 +304,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
             assert preVersionTypes[requestIndex] != null;
         }
 
-        processAfter(request.refresh(), indexShard, location);
+        processAfterWrite(request.refresh(), indexShard, location);
         BulkItemResponse[] responses = new BulkItemResponse[request.items().length];
         BulkItemRequest[] items = request.items();
         for (int i = 0; i < items.length; i++) {
@@ -320,7 +320,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
         }
     }
 
-    private WriteResult shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState,
+    private WriteResult<IndexResponse> shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState,
             IndexShard indexShard, boolean processed) throws Throwable {
 
         // validate, if routing is required, that we got routing
@@ -334,21 +334,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
         if (!processed) {
             indexRequest.process(clusterState.metaData(), mappingMd, allowIdGeneration, request.index());
         }
 
-        return executeIndexRequestOnPrimary(indexRequest, indexShard);
-    }
-
-    private WriteResult<DeleteResponse> shardDeleteOperation(BulkShardRequest request, DeleteRequest deleteRequest, IndexShard indexShard) {
-        Engine.Delete delete = indexShard.prepareDelete(deleteRequest.type(), deleteRequest.id(), deleteRequest.version(), deleteRequest.versionType(), Engine.Operation.Origin.PRIMARY);
-        indexShard.delete(delete);
-        // update the request with the version so it will go to the replicas
-        deleteRequest.versionType(delete.versionType().versionTypeForReplicationAndRecovery());
-        deleteRequest.version(delete.version());
-
-        assert deleteRequest.versionType().validateVersionForWrites(deleteRequest.version());
-
-        DeleteResponse deleteResponse = new DeleteResponse(request.index(), deleteRequest.type(), deleteRequest.id(), delete.version(), delete.found());
-        return new WriteResult(deleteResponse, delete.getTranslogLocation());
+        return TransportIndexAction.executeIndexRequestOnPrimary(indexRequest, indexShard, mappingUpdatedAction);
     }
 
     static class UpdateResult {
@@ -424,7 +410,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
             case DELETE:
                 DeleteRequest deleteRequest = translate.action();
                 try {
-                    WriteResult result = shardDeleteOperation(bulkShardRequest, deleteRequest, indexShard);
+                    WriteResult<DeleteResponse> result = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard);
                     return new UpdateResult(translate, deleteRequest, result);
                 } catch (Throwable t) {
                     t = ExceptionsHelper.unwrapCause(t);
@@ -457,15 +443,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
             if (item.request() instanceof IndexRequest) {
                 IndexRequest indexRequest = (IndexRequest) item.request();
                 try {
-                    SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, indexRequest.source()).index(shardId.getIndex()).type(indexRequest.type()).id(indexRequest.id())
-                            .routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
-
-                    final Engine.Index operation = indexShard.prepareIndex(sourceToParse, indexRequest.version(), indexRequest.versionType(), Engine.Operation.Origin.REPLICA);
-                    Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
-                    if (update != null) {
-                        throw new RetryOnReplicaException(shardId, "Mappings are not available on the replica yet, triggered update: " + update);
-                    }
-                    indexShard.index(operation);
+                    Engine.Index operation = TransportIndexAction.executeIndexRequestOnReplica(indexRequest, indexShard);
                     location = locationToSync(location, operation.getTranslogLocation());
                 } catch (Throwable e) {
                     // if its not an ignore replica failure, we need to make sure to bubble up the failure
@@ -477,7 +455,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
             } else if (item.request() instanceof DeleteRequest) {
                 DeleteRequest deleteRequest = (DeleteRequest) item.request();
                 try {
-                    Engine.Delete delete = indexShard.prepareDelete(deleteRequest.type(), deleteRequest.id(), deleteRequest.version(), deleteRequest.versionType(), Engine.Operation.Origin.REPLICA);
+                    Engine.Delete delete = TransportDeleteAction.executeDeleteRequestOnReplica(deleteRequest, indexShard);
                     indexShard.delete(delete);
                     location = locationToSync(location, delete.getTranslogLocation());
                 } catch (Throwable e) {
@@ -492,7 +470,7 @@ public class TransportShardBulkAction extends TransportReplicationAction<BulkSha
             }
         }
 
-        processAfter(request.refresh(), indexShard, location);
+        processAfterWrite(request.refresh(), indexShard, location);
     }
 
     private void applyVersion(BulkItemRequest item, long version, VersionType versionType) {
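
Note: after this refactor the bulk shard action shares one primary/replica execution path with the single-document index and delete actions via the static execute* helpers. The WriteResult it passes around simply pairs a response with the translog location it was written at; a self-contained sketch (the real class lives in TransportReplicationAction and is generic only over the response type):

    // Self-contained sketch of the WriteResult holder; LOC stands in for
    // org.elasticsearch.index.translog.Translog.Location.
    final class WriteResult<T, LOC> {
        final T response;       // e.g. IndexResponse or DeleteResponse
        final LOC location;     // translog position to sync up to

        WriteResult(T response, LOC location) {
            this.response = response;
            this.location = location;
        }

        T response() {
            return response;
        }
    }
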
@@ -130,26 +130,36 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
     protected Tuple<DeleteResponse, DeleteRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
         DeleteRequest request = shardRequest.request;
         IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id());
-        Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY);
+        final WriteResult<DeleteResponse> result = executeDeleteRequestOnPrimary(request, indexShard);
+        processAfterWrite(request.refresh(), indexShard, result.location);
+        return new Tuple<>(result.response, shardRequest.request);
+    }
+
+    public static WriteResult<DeleteResponse> executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard indexShard) {
+        Engine.Delete delete = indexShard.prepareDeleteOnPrimary(request.type(), request.id(), request.version(), request.versionType());
         indexShard.delete(delete);
         // update the request with the version so it will go to the replicas
         request.versionType(delete.versionType().versionTypeForReplicationAndRecovery());
         request.version(delete.version());
 
         assert request.versionType().validateVersionForWrites(request.version());
-        processAfter(request.refresh(), indexShard, delete.getTranslogLocation());
-
-        DeleteResponse response = new DeleteResponse(shardRequest.shardId.getIndex(), request.type(), request.id(), delete.version(), delete.found());
-        return new Tuple<>(response, shardRequest.request);
+        return new WriteResult<>(
+                new DeleteResponse(indexShard.shardId().getIndex(), request.type(), request.id(), delete.version(), delete.found()),
+                delete.getTranslogLocation());
     }
 
+    public static Engine.Delete executeDeleteRequestOnReplica(DeleteRequest request, IndexShard indexShard) {
+        Engine.Delete delete = indexShard.prepareDeleteOnReplica(request.type(), request.id(), request.version(), request.versionType());
+        indexShard.delete(delete);
+        return delete;
+    }
+
+
     @Override
     protected void shardOperationOnReplica(ShardId shardId, DeleteRequest request) {
         IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id());
-        Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version(), request.versionType(), Engine.Operation.Origin.REPLICA);
-
-        indexShard.delete(delete);
-        processAfter(request.refresh(), indexShard, delete.getTranslogLocation());
+        Engine.Delete delete = executeDeleteRequestOnReplica(request, indexShard);
+        processAfterWrite(request.refresh(), indexShard, delete.getTranslogLocation());
     }
 
     @Override
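
Note: executeDeleteRequestOnPrimary writes the version the engine actually used back onto the DeleteRequest before it is replicated, so replicas replay the delete at exactly the same version; executeDeleteRequestOnReplica then only has to prepare and apply. A condensed sketch of that handshake, with Request and Delete as stand-ins for DeleteRequest and Engine.Delete:

    // Condensed sketch: the primary resolves the version, stamps it on the
    // request, and the replica applies the operation at that exact version.
    class DeleteHandshakeSketch {
        static class Request { long version = -1; }                      // DeleteRequest stand-in
        static class Delete { final long version; Delete(long v) { version = v; } }

        static Delete onPrimary(Request request) {
            Delete delete = new Delete(42);      // the engine assigns the real version
            request.version = delete.version;    // forward it to the replicas
            return delete;
        }

        static Delete onReplica(Request request) {
            return new Delete(request.version);  // replay at the primary's version
        }
    }
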
@@ -166,11 +166,11 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
         IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId.getIndex());
         IndexShard indexShard = indexService.getShard(shardRequest.shardId.id());
 
-        final WriteResult<IndexResponse> result = executeIndexRequestOnPrimary(request, indexShard);
+        final WriteResult<IndexResponse> result = executeIndexRequestOnPrimary(request, indexShard, mappingUpdatedAction);
 
         final IndexResponse response = result.response;
         final Translog.Location location = result.location;
-        processAfter(request.refresh(), indexShard, location);
+        processAfterWrite(request.refresh(), indexShard, location);
         return new Tuple<>(response, shardRequest.request);
     }
 
@@ -178,16 +178,64 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
     protected void shardOperationOnReplica(ShardId shardId, IndexRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
         IndexShard indexShard = indexService.getShard(shardId.id());
+        final Engine.Index operation = executeIndexRequestOnReplica(request, indexShard);
+        processAfterWrite(request.refresh(), indexShard, operation.getTranslogLocation());
+    }
+
+    /**
+     * Execute the given {@link IndexRequest} on a replica shard, throwing a
+     * {@link RetryOnReplicaException} if the operation needs to be re-tried.
+     */
+    public static Engine.Index executeIndexRequestOnReplica(IndexRequest request, IndexShard indexShard) {
+        final ShardId shardId = indexShard.shardId();
         SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).index(shardId.getIndex()).type(request.type()).id(request.id())
                 .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
 
-        final Engine.Index operation = indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.REPLICA);
+        final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.version(), request.versionType());
         Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
         if (update != null) {
             throw new RetryOnReplicaException(shardId, "Mappings are not available on the replica yet, triggered update: " + update);
         }
         indexShard.index(operation);
-        processAfter(request.refresh(), indexShard, operation.getTranslogLocation());
+        return operation;
+    }
+
+    /** Utility method to prepare an index operation on primary shards */
+    public static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard indexShard) {
+        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.source()).index(request.index()).type(request.type()).id(request.id())
+                .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
+        return indexShard.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType());
+    }
+
+    /**
+     * Execute the given {@link IndexRequest} on a primary shard, throwing a
+     * {@link RetryOnPrimaryException} if the operation needs to be re-tried.
+     */
+    public static WriteResult<IndexResponse> executeIndexRequestOnPrimary(IndexRequest request, IndexShard indexShard, MappingUpdatedAction mappingUpdatedAction) throws Throwable {
+        Engine.Index operation = prepareIndexOperationOnPrimary(request, indexShard);
+        Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
+        final ShardId shardId = indexShard.shardId();
+        if (update != null) {
+            final String indexName = shardId.getIndex();
+            mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update);
+            operation = prepareIndexOperationOnPrimary(request, indexShard);
+            update = operation.parsedDoc().dynamicMappingsUpdate();
+            if (update != null) {
+                throw new RetryOnPrimaryException(shardId,
+                        "Dynamic mappings are not available on the node that holds the primary yet");
+            }
+        }
+        final boolean created = indexShard.index(operation);
+
+        // update the version on request so it will happen on the replicas
+        final long version = operation.version();
+        request.version(version);
+        request.versionType(request.versionType().versionTypeForReplicationAndRecovery());
+
+        assert request.versionType().validateVersionForWrites(request.version());
+
+        return new WriteResult<>(new IndexResponse(shardId.getIndex(), request.type(), request.id(), request.version(), created), operation.getTranslogLocation());
     }
 
 }
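
Note: executeIndexRequestOnPrimary above retries exactly once: if parsing produced a dynamic mapping update, it pushes that update to the master synchronously, re-prepares the operation against the new mappings, and only throws RetryOnPrimaryException if a second update is still pending. The same retry-once shape in isolation (a sketch, with the ES types reduced to functional placeholders):

    import java.util.function.Consumer;
    import java.util.function.Function;
    import java.util.function.Supplier;

    class RetryOncePattern {
        static class RetryLaterException extends RuntimeException {}

        // prepare() parses the document and reports any mapping it would need;
        // publish() synchronously installs that mapping cluster-wide.
        static <OP, UPDATE> OP prepareWithOneRetry(Supplier<OP> prepare,
                                                   Function<OP, UPDATE> pendingUpdate,
                                                   Consumer<UPDATE> publish) {
            OP op = prepare.get();
            UPDATE update = pendingUpdate.apply(op);
            if (update != null) {
                publish.accept(update);              // updateMappingOnMasterSynchronously(...)
                op = prepare.get();                  // re-parse against the new mappings
                if (pendingUpdate.apply(op) != null) {
                    throw new RetryLaterException(); // RetryOnPrimaryException in the diff
                }
            }
            return op;
        }
    }
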
@@ -25,9 +25,6 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionWriteResponse;
 import org.elasticsearch.action.UnavailableShardsException;
 import org.elasticsearch.action.WriteConsistencyLevel;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.index.IndexRequest.OpType;
-import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.action.support.TransportActions;
@@ -55,10 +52,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
-import org.elasticsearch.index.mapper.Mapping;
-import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.translog.Translog;
@@ -1071,43 +1065,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
         }
     }
 
-    /** Utility method to create either an index or a create operation depending
-     * on the {@link OpType} of the request. */
-    private Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard indexShard) {
-        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.source()).index(request.index()).type(request.type()).id(request.id())
-                .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
-        return indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY);
-    }
-
-    /** Execute the given {@link IndexRequest} on a primary shard, throwing a
-     * {@link RetryOnPrimaryException} if the operation needs to be re-tried. */
-    protected final WriteResult<IndexResponse> executeIndexRequestOnPrimary(IndexRequest request, IndexShard indexShard) throws Throwable {
-        Engine.Index operation = prepareIndexOperationOnPrimary(request, indexShard);
-        Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
-        final ShardId shardId = indexShard.shardId();
-        if (update != null) {
-            final String indexName = shardId.getIndex();
-            mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update);
-            operation = prepareIndexOperationOnPrimary(request, indexShard);
-            update = operation.parsedDoc().dynamicMappingsUpdate();
-            if (update != null) {
-                throw new RetryOnPrimaryException(shardId,
-                        "Dynamics mappings are not available on the node that holds the primary yet");
-            }
-        }
-        final boolean created = indexShard.index(operation);
-
-        // update the version on request so it will happen on the replicas
-        final long version = operation.version();
-        request.version(version);
-        request.versionType(request.versionType().versionTypeForReplicationAndRecovery());
-
-        assert request.versionType().validateVersionForWrites(request.version());
-
-        return new WriteResult(new IndexResponse(shardId.getIndex(), request.type(), request.id(), request.version(), created), operation.getTranslogLocation());
-    }
-
-    protected final void processAfter(boolean refresh, IndexShard indexShard, Translog.Location location) {
+    protected final void processAfterWrite(boolean refresh, IndexShard indexShard, Translog.Location location) {
         if (refresh) {
             try {
                 indexShard.refresh("refresh_flag_index");
@@ -176,7 +176,6 @@ public class ClusterModule extends AbstractModule {
         registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, Validator.TIME_NON_NEGATIVE);
         registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE);
         registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE);
-        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE);
        registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR);
         registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER);
         registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER);
@@ -748,8 +748,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
             IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC,
             RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE,
             RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE,
-            RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC,
-            RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC));
+            RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC));
 
 
     /** All known time cluster settings. */
@@ -538,6 +538,13 @@ public final class ShardRouting implements Streamable, ToXContent {
         return b;
     }
 
+    /**
+     * Returns <code>true</code> if this shard is a relocation target for another shard (i.e., was created with {@link #buildTargetRelocatingShard()}
+     */
+    public boolean isRelocationTarget() {
+        return state == ShardRoutingState.INITIALIZING && relocatingNodeId != null;
+    }
+
     /** returns true if the routing is the relocation target of the given routing */
     public boolean isRelocationTargetOf(ShardRouting other) {
         boolean b = this.allocationId != null && other.allocationId != null && this.state == ShardRoutingState.INITIALIZING &&
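
Note: the new isRelocationTarget() reads: a shard is a relocation target while it is still INITIALIZING and knows which node it is relocating from. IndexShard.prepareDeleteOnReplica, later in this same diff, relies on it to let a relocating primary keep applying replica-origin operations. The predicate in isolation (a sketch):

    // Sketch: a shard is a relocation target while it is initializing
    // and has a relocating-from node recorded.
    class RoutingSketch {
        enum State { INITIALIZING, STARTED, RELOCATING, UNASSIGNED }

        State state;
        String relocatingNodeId; // null unless part of a relocation

        boolean isRelocationTarget() {
            return state == State.INITIALIZING && relocatingNodeId != null;
        }
    }
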
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.common.util;
 
+import org.apache.lucene.util.ThreadInterruptedException;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.Nullable;
@@ -84,7 +85,7 @@ public class CancellableThreads {
         RuntimeException throwable = null;
         try {
             interruptable.run();
-        } catch (InterruptedException e) {
+        } catch (InterruptedException | ThreadInterruptedException e) {
             // assume this is us and ignore
         } catch (RuntimeException t) {
             throwable = t;
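
Note: Lucene wraps InterruptedException in its own unchecked ThreadInterruptedException, so a cancellation that interrupts a thread while it is inside Lucene code surfaces as the latter; the widened multi-catch treats both as "we interrupted ourselves". The same pattern in isolation (a sketch; the nested exception class stands in for org.apache.lucene.util.ThreadInterruptedException):

    class Interruption {
        static class ThreadInterruptedException extends RuntimeException {}

        interface Interruptable { void run() throws InterruptedException; }

        static void execute(Interruptable interruptable) {
            RuntimeException throwable = null;
            try {
                interruptable.run();
            } catch (InterruptedException | ThreadInterruptedException e) {
                // assume the interrupt came from our own cancellation and swallow it
            } catch (RuntimeException t) {
                throwable = t;
            }
            if (throwable != null) {
                throw throwable;
            }
        }
    }
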
@@ -584,7 +584,10 @@ class DocumentParser implements Closeable {
         if (context.parser().estimatedNumberType()) {
             Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
             if (builder == null) {
-                builder = MapperBuilders.doubleField(currentFieldName);
+                // no templates are defined, we use float by default instead of double
+                // since this is much more space-efficient and should be enough most of
+                // the time
+                builder = MapperBuilders.floatField(currentFieldName);
             }
             return builder;
         } else {
@@ -597,7 +600,10 @@ class DocumentParser implements Closeable {
         } else if (numberType == XContentParser.NumberType.DOUBLE) {
             Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
             if (builder == null) {
-                builder = MapperBuilders.doubleField(currentFieldName);
+                // no templates are defined, we use float by default instead of double
+                // since this is much more space-efficient and should be enough most of
+                // the time
+                builder = MapperBuilders.floatField(currentFieldName);
             }
             return builder;
         }
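
Note: with the two hunks above, a JSON document that dynamically introduces a floating-point field is now mapped as float rather than double when no dynamic template says otherwise; doubles that genuinely need the range can still be mapped explicitly. The fallback shape as a sketch:

    import java.util.Map;

    class DynamicNumericDefault {
        static String dynamicNumericType(String fieldName, Map<String, String> templates) {
            String fromTemplate = templates.get(fieldName); // findTemplateBuilder(...) stand-in
            if (fromTemplate == null) {
                // no templates are defined: float is half the size of double on disk
                // and is precise enough most of the time
                return "float";
            }
            return fromTemplate;
        }
    }
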
@@ -370,15 +370,6 @@ public abstract class FieldMapper extends Mapper {
             return;
         }
         FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
-        List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
-        fieldType().checkTypeName(fieldMergeWith.fieldType(), subConflicts);
-        if (subConflicts.isEmpty() == false) {
-            // return early if field types don't match
-            assert subConflicts.size() == 1;
-            mergeResult.addConflict(subConflicts.get(0));
-            return;
-        }
-
         multiFields.merge(mergeWith, mergeResult);
 
         if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
@@ -154,12 +154,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
         if (ref != null) {
             List<String> conflicts = new ArrayList<>();
-            ref.get().checkTypeName(fieldMapper.fieldType(), conflicts);
-            if (conflicts.isEmpty()) { // only check compat if they are the same type
             final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
             boolean strict = beStrict(type, types, updateAllTypes);
             ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-            }
             if (conflicts.isEmpty() == false) {
                 throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
             }
@@ -169,12 +166,9 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
         if (indexNameRef != null) {
             List<String> conflicts = new ArrayList<>();
-            indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts);
-            if (conflicts.isEmpty()) { // only check compat if they are the same type
             final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
             boolean strict = beStrict(type, types, updateAllTypes);
             indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
-            }
             if (conflicts.isEmpty() == false) {
                 throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
             }
@@ -229,9 +229,9 @@ public abstract class MappedFieldType extends FieldType {
     public abstract String typeName();
 
     /** Checks this type is the same type as other. Adds a conflict if they are different. */
-    public final void checkTypeName(MappedFieldType other, List<String> conflicts) {
+    private final void checkTypeName(MappedFieldType other) {
         if (typeName().equals(other.typeName()) == false) {
-            conflicts.add("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
+            throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
         } else if (getClass() != other.getClass()) {
             throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
         }
@@ -243,6 +243,8 @@ public abstract class MappedFieldType extends FieldType {
      * Otherwise, only properties which must never change in an index are checked.
      */
     public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) {
+        checkTypeName(other);
+
         boolean indexed = indexOptions() != IndexOptions.NONE;
         boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
         // TODO: should be validating if index options go "up" (but "down" is ok)
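
Note: checkTypeName changes from accumulating into a conflicts list to failing fast, and is now called at the top of checkCompatibility: a cross-type change of a field's type is an IllegalArgumentException (bad user input), while two different classes sharing a type name is an IllegalStateException (an internal bug). The shape of that guard (a sketch):

    import java.util.List;

    abstract class FieldTypeSketch {
        abstract String typeName();
        abstract String fullName();

        private void checkTypeName(FieldTypeSketch other) {
            if (!typeName().equals(other.typeName())) {
                // user tried to change e.g. a string field into a long field
                throw new IllegalArgumentException("mapper [" + fullName() + "] cannot be changed from type ["
                        + typeName() + "] to [" + other.typeName() + "]");
            } else if (getClass() != other.getClass()) {
                // same type name, different implementations: a programming error
                throw new IllegalStateException("Type names equal for class "
                        + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
            }
        }

        void checkCompatibility(FieldTypeSketch other, List<String> conflicts, boolean strict) {
            checkTypeName(other); // fail fast before collecting softer conflicts
            // ... property-by-property checks append to conflicts ...
        }
    }
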
@@ -134,6 +134,26 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
         public ParseFieldMatcher parseFieldMatcher() {
             return parseFieldMatcher;
         }
+
+        public boolean isWithinMultiField() { return false; }
+
+        protected Function<String, TypeParser> typeParsers() { return typeParsers; }
+
+        protected Function<String, SimilarityProvider> similarityLookupService() { return similarityLookupService; }
+
+        public ParserContext createMultiFieldContext(ParserContext in) {
+            return new MultiFieldParserContext(in) {
+                @Override
+                public boolean isWithinMultiField() { return true; }
+            };
+        }
+
+        static class MultiFieldParserContext extends ParserContext {
+            MultiFieldParserContext(ParserContext in) {
+                super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher());
+            }
+        }
+
     }
 
     Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
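
Note: MultiFieldParserContext is a one-bit decorator: it clones the parser context and overrides isWithinMultiField() to true, which is how parseField/parseMultiField in TypeParsers below can tell whether a copy_to appears inside a multi field. The decorator shape in isolation (a sketch):

    // Sketch: same context, but flagged as "inside a multi field" so
    // downstream parsing can version-gate copy_to.
    class ParserContextSketch {
        public boolean isWithinMultiField() {
            return false;
        }

        public ParserContextSketch createMultiFieldContext(ParserContextSketch in) {
            return new ParserContextSketch() {
                @Override
                public boolean isWithinMultiField() {
                    return true;
                }
            };
        }
    }
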
@@ -46,7 +46,7 @@ import java.util.Map;
 
 import static org.apache.lucene.index.IndexOptions.NONE;
 import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
 import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
 
 public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {
@@ -159,7 +159,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             StringFieldMapper.Builder builder = stringField(name);
-            parseField(builder, name, node, parserContext);
+            parseTextField(builder, name, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String propName = Strings.toUnderscoreCase(entry.getKey());
@@ -25,6 +25,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.loader.SettingsLoader;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -181,34 +182,17 @@ public class TypeParsers {
         }
     }
 
-    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+    private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
         NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer();
         NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer();
 
         for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
             final String propName = Strings.toUnderscoreCase(entry.getKey());
             final Object propNode = entry.getValue();
-            if (propName.equals("index_name") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                builder.indexName(propNode.toString());
-                iterator.remove();
-            } else if (propName.equals("store")) {
-                builder.store(parseStore(name, propNode.toString()));
-                iterator.remove();
-            } else if (propName.equals("index")) {
-                parseIndex(name, propNode.toString(), builder);
-                iterator.remove();
-            } else if (propName.equals("tokenized")) {
-                builder.tokenized(nodeBooleanValue(propNode));
-                iterator.remove();
-            } else if (propName.equals(DOC_VALUES)) {
-                builder.docValues(nodeBooleanValue(propNode));
-                iterator.remove();
-            } else if (propName.equals("term_vector")) {
+            if (propName.equals("term_vector")) {
                 parseTermVector(name, propNode.toString(), builder);
                 iterator.remove();
-            } else if (propName.equals("boost")) {
-                builder.boost(nodeFloatValue(propNode));
-                iterator.remove();
             } else if (propName.equals("store_term_vectors")) {
                 builder.storeTermVectors(nodeBooleanValue(propNode));
                 iterator.remove();
@@ -221,6 +205,69 @@ public class TypeParsers {
             } else if (propName.equals("store_term_vector_payloads")) {
                 builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
                 iterator.remove();
+            } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
+                       propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
+
+                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+                if (analyzer == null) {
+                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+                }
+                indexAnalyzer = analyzer;
+                iterator.remove();
+            } else if (propName.equals("search_analyzer")) {
+                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+                if (analyzer == null) {
+                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+                }
+                searchAnalyzer = analyzer;
+                iterator.remove();
+            }
+        }
+
+        if (indexAnalyzer == null) {
+            if (searchAnalyzer != null) {
+                throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
+            }
+        } else if (searchAnalyzer == null) {
+            searchAnalyzer = indexAnalyzer;
+        }
+        builder.indexAnalyzer(indexAnalyzer);
+        builder.searchAnalyzer(searchAnalyzer);
+    }
+
+    /**
+     * Parse text field attributes. In addition to {@link #parseField common attributes}
+     * this will parse analysis and term-vectors related settings.
+     */
+    public static void parseTextField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+        parseField(builder, name, fieldNode, parserContext);
+        parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
+    }
+
+    /**
+     * Parse common field attributes such as {@code doc_values} or {@code store}.
+     */
+    public static void parseField(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
+        Version indexVersionCreated = parserContext.indexVersionCreated();
+        for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
+            Map.Entry<String, Object> entry = iterator.next();
+            final String propName = Strings.toUnderscoreCase(entry.getKey());
+            final Object propNode = entry.getValue();
+            if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
+                builder.indexName(propNode.toString());
+                iterator.remove();
+            } else if (propName.equals("store")) {
+                builder.store(parseStore(name, propNode.toString()));
+                iterator.remove();
+            } else if (propName.equals("index")) {
+                parseIndex(name, propNode.toString(), builder);
+                iterator.remove();
+            } else if (propName.equals(DOC_VALUES)) {
+                builder.docValues(nodeBooleanValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("boost")) {
+                builder.boost(nodeFloatValue(propNode));
+                iterator.remove();
+            } else if (propName.equals("omit_norms")) {
+                builder.omitNorms(nodeBooleanValue(propNode));
+                iterator.remove();
@@ -242,7 +289,7 @@ public class TypeParsers {
                 iterator.remove();
             } else if (propName.equals("omit_term_freq_and_positions")) {
                 final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-                if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) {
+                if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
                     throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead");
                 }
                 // deprecated option for BW compat
@@ -251,29 +298,13 @@ public class TypeParsers {
             } else if (propName.equals("index_options")) {
                 builder.indexOptions(nodeIndexOptionValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
-                       propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-
-                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
-                }
-                indexAnalyzer = analyzer;
-                iterator.remove();
-            } else if (propName.equals("search_analyzer")) {
-                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
-                }
-                searchAnalyzer = analyzer;
-                iterator.remove();
             } else if (propName.equals("include_in_all")) {
                 builder.includeInAll(nodeBooleanValue(propNode));
                 iterator.remove();
-            } else if (propName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
+            } else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
                 // ignore for old indexes
                 iterator.remove();
-            } else if (propName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
+            } else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
                 // ignore for old indexes
                 iterator.remove();
             } else if (propName.equals("similarity")) {
@@ -284,23 +315,28 @@ public class TypeParsers {
                 builder.fieldDataSettings(settings);
                 iterator.remove();
             } else if (propName.equals("copy_to")) {
+                if (parserContext.isWithinMultiField()) {
+                    if (indexVersionCreated.after(Version.V_2_1_0) ||
+                        (indexVersionCreated.after(Version.V_2_0_1) && indexVersionCreated.before(Version.V_2_1_0))) {
+                        throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field.");
+                    } else {
+                        ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [" + name + "] which is within a multi field. This feature has been removed and the copy_to will be removed from the mapping.");
+                    }
+                } else {
                     parseCopyFields(propNode, builder);
+                }
                 iterator.remove();
             }
         }
 
-        if (indexAnalyzer == null) {
-            if (searchAnalyzer != null) {
-                throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set");
-            }
-        } else if (searchAnalyzer == null) {
-            searchAnalyzer = indexAnalyzer;
-        }
-        builder.indexAnalyzer(indexAnalyzer);
-        builder.searchAnalyzer(searchAnalyzer);
+        if (indexVersionCreated.before(Version.V_2_2_0)) {
+            // analyzer, search_analyzer, term_vectors were accepted on all fields
+            // before 2.2, even though it made little sense
+            parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
+        }
     }
 
     public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
+        parserContext = parserContext.createMultiFieldContext(parserContext);
         if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
             builder.multiFieldPathType(parsePathType(name, propNode.toString()));
             return true;
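
Note: the copy_to branch above encodes a compatibility window: indexes created strictly after 2.0.1 (except exactly 2.1.0, which already shipped the lenient behavior) reject copy_to inside a multi field outright, while indexes from the already-released versions only log a warning so they can still be read. A condensed sketch of the gate, with version objects reduced to a single comparable id:

    // Sketch of the version gate; higher id = newer version, mirroring
    // after()/before() in the diff.
    class CopyToGate {
        static final int V_2_0_1 = 2_00_01;
        static final int V_2_1_0 = 2_01_00;

        static void checkCopyToInMultiField(int indexVersionCreated, String field) {
            boolean rejected = indexVersionCreated > V_2_1_0
                    || (indexVersionCreated > V_2_0_1 && indexVersionCreated < V_2_1_0);
            if (rejected) {
                throw new IllegalArgumentException("copy_to in multi fields is not allowed; field [" + field + "]");
            } else {
                System.err.println("warning: copy_to in multi field [" + field + "] will be removed from the mapping");
            }
        }
    }
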
@@ -49,7 +49,7 @@ import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
 
 /**
  *
@@ -134,7 +134,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
             }
         }
 
-        parseField(builder, builder.name, node, parserContext);
+        parseTextField(builder, builder.name, node, parserContext);
         for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
             String fieldName = Strings.toUnderscoreCase(entry.getKey());
@@ -19,10 +19,7 @@
 
 package org.elasticsearch.index.shard;
 
-import org.apache.lucene.index.CheckIndex;
-import org.apache.lucene.index.IndexCommit;
-import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
-import org.apache.lucene.index.SnapshotDeletionPolicy;
+import org.apache.lucene.index.*;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -194,8 +191,10 @@ public class IndexShard extends AbstractIndexShardComponent {
     private final IndexSearcherWrapper searcherWrapper;
     private final TimeValue inactiveTime;
 
-    /** True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link
-     * IndexingMemoryController}). */
+    /**
+     * True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link
+     * IndexingMemoryController}).
+     */
     private final AtomicBoolean active = new AtomicBoolean();
 
     public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache,
@@ -445,9 +444,21 @@ public class IndexShard extends AbstractIndexShardComponent {
         return previousState;
     }
 
-    public Engine.Index prepareIndex(SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin) {
+    public Engine.Index prepareIndexOnPrimary(SourceToParse source, long version, VersionType versionType) {
         try {
-            return prepareIndex(docMapper(source.type()), source, version, versionType, origin);
+            if (shardRouting.primary() == false) {
+                throw new IllegalIndexShardStateException(shardId, state, "shard is not a primary");
+            }
+            return prepareIndex(docMapper(source.type()), source, version, versionType, Engine.Operation.Origin.PRIMARY);
+        } catch (Throwable t) {
+            verifyNotClosed(t);
+            throw t;
+        }
+    }
+
+    public Engine.Index prepareIndexOnReplica(SourceToParse source, long version, VersionType versionType) {
+        try {
+            return prepareIndex(docMapper(source.type()), source, version, versionType, Engine.Operation.Origin.REPLICA);
         } catch (Throwable t) {
             verifyNotClosed(t);
             throw t;
@@ -486,11 +497,27 @@ public class IndexShard extends AbstractIndexShardComponent {
         return created;
     }
 
-    public Engine.Delete prepareDelete(String type, String id, long version, VersionType versionType, Engine.Operation.Origin origin) {
-        long startTime = System.nanoTime();
-        final DocumentMapper documentMapper = docMapper(type).getDocumentMapper();
-        return new Engine.Delete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, origin, startTime, false);
+    public Engine.Delete prepareDeleteOnPrimary(String type, String id, long version, VersionType versionType) {
+        if (shardRouting.primary() == false) {
+            throw new IllegalIndexShardStateException(shardId, state, "shard is not a primary");
+        }
+        final DocumentMapper documentMapper = docMapper(type).getDocumentMapper();
+        return prepareDelete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, Engine.Operation.Origin.PRIMARY);
+    }
+
+    public Engine.Delete prepareDeleteOnReplica(String type, String id, long version, VersionType versionType) {
+        if (shardRouting.primary() && shardRouting.isRelocationTarget() == false) {
+            throw new IllegalIndexShardStateException(shardId, state, "shard is not a replica");
+        }
+        final DocumentMapper documentMapper = docMapper(type).getDocumentMapper();
+        return prepareDelete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, Engine.Operation.Origin.REPLICA);
+    }
+
+    static Engine.Delete prepareDelete(String type, String id, Term uid, long version, VersionType versionType, Engine.Operation.Origin origin) {
+        long startTime = System.nanoTime();
+        return new Engine.Delete(type, id, uid, version, versionType, origin, startTime, false);
     }
 
 
     public void delete(Engine.Delete delete) {
         ensureWriteAllowed(delete);
@@ -533,11 +560,8 @@ public class IndexShard extends AbstractIndexShardComponent {
     }
 
     public DocsStats docStats() {
-        final Engine.Searcher searcher = acquireSearcher("doc_stats");
-        try {
+        try (Engine.Searcher searcher = acquireSearcher("doc_stats")) {
             return new DocsStats(searcher.reader().numDocs(), searcher.reader().numDeletedDocs());
-        } finally {
-            searcher.close();
         }
     }
 
@@ -977,8 +1001,10 @@ public class IndexShard extends AbstractIndexShardComponent {
         this.shardEventListener.delegates.add(onShardFailure);
     }
 
-    /** Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than
-     * the new buffering indexing size then we do a refresh to free up the heap. */
+    /**
+     * Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than
+     * the new buffering indexing size then we do a refresh to free up the heap.
+     */
     public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
 
         final EngineConfig config = engineConfig;
@@ -1021,9 +1047,11 @@ public class IndexShard extends AbstractIndexShardComponent {
         engine.getTranslog().updateBuffer(shardTranslogBufferSize);
     }
 
-    /** Called by {@link IndexingMemoryController} to check whether more than {@code inactiveTimeNS} has passed since the last
+    /**
+     * Called by {@link IndexingMemoryController} to check whether more than {@code inactiveTimeNS} has passed since the last
      * indexing operation, and become inactive (reducing indexing and translog buffers to tiny values) if so. This returns true
-     * if the shard is inactive. */
+     * if the shard is inactive.
+     */
     public boolean checkIdle() {
         return checkIdle(inactiveTime.nanos());
     }
@@ -1042,8 +1070,10 @@ public class IndexShard extends AbstractIndexShardComponent {
         return active.get() == false;
     }
 
-    /** Returns {@code true} if this shard is active (has seen indexing ops in the last {@link
-     * IndexShard#INDEX_SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}. */
+    /**
+     * Returns {@code true} if this shard is active (has seen indexing ops in the last {@link
+     * IndexShard#INDEX_SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}.
+     */
     public boolean getActive() {
         return active.get();
     }
@@ -1077,10 +1107,10 @@ public class IndexShard extends AbstractIndexShardComponent {
         return storeRecovery.recoverFromStore(this, shouldExist, localNode);
     }
 
-    public boolean restoreFromRepository(IndexShardRepository repository, DiscoveryNode locaNode) {
+    public boolean restoreFromRepository(IndexShardRepository repository, DiscoveryNode localNode) {
         assert shardRouting.primary() : "recover from store only makes sense if the shard is a primary shard";
         StoreRecovery storeRecovery = new StoreRecovery(shardId, logger);
-        return storeRecovery.recoverFromRepository(this, repository, locaNode);
+        return storeRecovery.recoverFromRepository(this, repository, localNode);
     }
 
     /**
@@ -1369,8 +1399,10 @@ public class IndexShard extends AbstractIndexShardComponent {
         return engine;
     }
 
-    /** NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is
-     * closed. */
+    /**
+     * NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is
+     * closed.
+     */
     protected Engine getEngineOrNull() {
         return this.currentEngineReference.get();
     }
@@ -1578,6 +1610,7 @@ public class IndexShard extends AbstractIndexShardComponent {
 
     /**
      * Simple struct encapsulating a shard failure
      *
+     * @see IndexShard#addShardFailureCallback(Callback)
      */
     public static final class ShardFailure {
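
Note: prepareIndex/prepareDelete are now origin-aware: the *OnPrimary variants refuse to run on a non-primary routing, and prepareDeleteOnReplica refuses a primary unless it is a relocation target (a relocating primary still replays replica-origin operations while it catches up). The guard logic in isolation (a sketch; ShardStateException stands in for IllegalIndexShardStateException):

    class OriginGuards {
        static class ShardStateException extends RuntimeException {
            ShardStateException(String msg) { super(msg); }
        }

        boolean primary;
        boolean relocationTarget; // ShardRouting.isRelocationTarget() from earlier in this diff

        void ensurePrimaryOrigin() {
            if (primary == false) {
                throw new ShardStateException("shard is not a primary");
            }
        }

        void ensureReplicaOrigin() {
            // a primary may still apply replica-origin ops while it is the target of a relocation
            if (primary && relocationTarget == false) {
                throw new ShardStateException("shard is not a replica");
            }
        }
    }
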
@@ -75,12 +75,6 @@ public class RecoverySettings extends AbstractComponent implements Closeable {
 
     public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes();
 
-    /**
-     * Use {@link #INDICES_RECOVERY_MAX_BYTES_PER_SEC} instead
-     */
-    @Deprecated
-    public static final String INDICES_RECOVERY_MAX_SIZE_PER_SEC = "indices.recovery.max_size_per_sec";
-
     private volatile ByteSizeValue fileChunkSize;
 
     private volatile boolean compress;
@@ -105,9 +99,9 @@ public class RecoverySettings extends AbstractComponent implements Closeable {
     public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsService) {
         super(settings);
 
-        this.fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, settings.getAsBytesSize("index.shard.recovery.file_chunk_size", new ByteSizeValue(512, ByteSizeUnit.KB)));
-        this.translogOps = settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, settings.getAsInt("index.shard.recovery.translog_ops", 1000));
-        this.translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, settings.getAsBytesSize("index.shard.recovery.translog_size", new ByteSizeValue(512, ByteSizeUnit.KB)));
+        this.fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB));
+        this.translogOps = settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, 1000);
+        this.translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB));
         this.compress = settings.getAsBoolean(INDICES_RECOVERY_COMPRESS, true);
 
         this.retryDelayStateSync = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(500));
@ -124,14 +118,14 @@ public class RecoverySettings extends AbstractComponent implements Closeable {
|
||||
);
|
||||
|
||||
|
||||
this.concurrentStreams = settings.getAsInt("indices.recovery.concurrent_streams", settings.getAsInt("index.shard.recovery.concurrent_streams", 3));
|
||||
this.concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, 3);
|
||||
this.concurrentStreamPool = EsExecutors.newScaling("recovery_stream", 0, concurrentStreams, 60, TimeUnit.SECONDS,
|
||||
EsExecutors.daemonThreadFactory(settings, "[recovery_stream]"));
|
||||
this.concurrentSmallFileStreams = settings.getAsInt("indices.recovery.concurrent_small_file_streams", settings.getAsInt("index.shard.recovery.concurrent_small_file_streams", 2));
|
||||
this.concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 2);
|
||||
this.concurrentSmallFileStreamPool = EsExecutors.newScaling("small_file_recovery_stream", 0, concurrentSmallFileStreams, 60,
|
||||
TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[small_file_recovery_stream]"));
|
||||
|
||||
this.maxBytesPerSec = settings.getAsBytesSize("indices.recovery.max_bytes_per_sec", settings.getAsBytesSize("indices.recovery.max_size_per_sec", new ByteSizeValue(40, ByteSizeUnit.MB)));
|
||||
this.maxBytesPerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(40, ByteSizeUnit.MB));
|
||||
if (maxBytesPerSec.bytes() <= 0) {
|
||||
rateLimiter = null;
|
||||
} else {
|
||||
@ -206,7 +200,7 @@ public class RecoverySettings extends AbstractComponent implements Closeable {
|
||||
class ApplySettings implements NodeSettingsService.Listener {
|
||||
@Override
|
||||
public void onRefreshSettings(Settings settings) {
|
||||
ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, settings.getAsBytesSize(INDICES_RECOVERY_MAX_SIZE_PER_SEC, RecoverySettings.this.maxBytesPerSec));
|
||||
ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec);
|
||||
if (!Objects.equals(maxSizePerSec, RecoverySettings.this.maxBytesPerSec)) {
|
||||
logger.info("updating [{}] from [{}] to [{}]", INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec, maxSizePerSec);
|
||||
RecoverySettings.this.maxBytesPerSec = maxSizePerSec;
|
||||
|
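// --- Editorial sketch, not part of this commit --------------------------------------
// The constructor changes above drop the legacy "index.shard.recovery.*" fallback keys,
// so each value is now read from a single setting with an inline default. A minimal
// illustration of that pattern (the settings values are invented; assumes
// org.elasticsearch.common.settings.Settings):
class RecoverySettingsSketch {
    static void demo() {
        Settings s = Settings.builder().put("indices.recovery.translog_ops", 2000).build();
        int translogOps = s.getAsInt("indices.recovery.translog_ops", 1000);          // 2000, from s
        int concurrentStreams = s.getAsInt("indices.recovery.concurrent_streams", 3); // 3, the default
    }
}
// -------------------------------------------------------------------------------------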
@ -208,11 +208,15 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe
        } catch (CancellableThreads.ExecutionCancelledException e) {
            logger.trace("recovery cancelled", e);
        } catch (Throwable e) {

            if (logger.isTraceEnabled()) {
                logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().index().name(), request.shardId().id());
            }
            Throwable cause = ExceptionsHelper.unwrapCause(e);
            if (cause instanceof CancellableThreads.ExecutionCancelledException) {
                // this can also come from the source wrapped in a RemoteTransportException
                onGoingRecoveries.failRecovery(recoveryStatus.recoveryId(), new RecoveryFailedException(request, "source has canceled the recovery", cause), false);
                return;
            }
            if (cause instanceof RecoveryEngineException) {
                // unwrap an exception that was thrown as part of the recovery
                cause = cause.getCause();

@ -1,3 +1,22 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.monitor;

import java.lang.management.OperatingSystemMXBean;

@ -128,14 +128,13 @@ public class Node implements Releasable {
     * @param preparedSettings Base settings to configure the node with
     */
    public Node(Settings preparedSettings) {
        this(preparedSettings, Version.CURRENT, Collections.<Class<? extends Plugin>>emptyList());
        this(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), Version.CURRENT, Collections.<Class<? extends Plugin>>emptyList());
    }

    Node(Settings preparedSettings, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
        final Settings pSettings = settingsBuilder().put(preparedSettings)
    protected Node(Environment tmpEnv, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
        Settings tmpSettings = settingsBuilder().put(tmpEnv.settings())
                .put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build();
        Environment tmpEnv = InternalSettingsPreparer.prepareEnvironment(pSettings, null);
        Settings tmpSettings = TribeService.processSettings(tmpEnv.settings());
        tmpSettings = TribeService.processSettings(tmpSettings);

        ESLogger logger = Loggers.getLogger(Node.class, tmpSettings.get("name"));
        logger.info("version[{}], pid[{}], build[{}/{}]", version, JvmInfo.jvmInfo().pid(), Build.CURRENT.shortHash(), Build.CURRENT.date());

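// --- Editorial sketch, not part of this commit --------------------------------------
// With the change above, the public Node(Settings) constructor prepares an Environment
// once up front and delegates to the new protected Node(Environment, ...) constructor.
// Hypothetical caller (the node name and path are invented):
class NodeBootSketch {
    static Node boot() {
        Settings base = Settings.builder()
                .put("name", "sketch-node")
                .put("path.home", "/tmp/es-home")
                .build();
        return new Node(base); // internally: InternalSettingsPreparer.prepareEnvironment(base, null)
    }
}
// -------------------------------------------------------------------------------------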
@ -316,7 +316,8 @@ public class PluginsService extends AbstractComponent {
            // gather urls for jar files
            try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
                for (Path jar : jarStream) {
                    bundle.urls.add(jar.toUri().toURL());
                    // normalize with toRealPath to get symlinks out of our hair
                    bundle.urls.add(jar.toRealPath().toUri().toURL());
                }
            }
            bundles.add(bundle);
@ -357,7 +358,8 @@ public class PluginsService extends AbstractComponent {
                // a jvm plugin: gather urls for jar files
                try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
                    for (Path jar : jarStream) {
                        urls.add(jar.toUri().toURL());
                        // normalize with toRealPath to get symlinks out of our hair
                        urls.add(jar.toRealPath().toUri().toURL());
                    }
                }
            }

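// --- Editorial sketch, not part of this commit --------------------------------------
// Why toRealPath() before toUri() matters here: it resolves symlinks, so a jar reached
// through a link and through its real location yields one canonical URL instead of two
// classpath entries. Self-contained demo (temp paths are invented):
class RealPathSketch {
    public static void main(String[] args) throws Exception {
        java.nio.file.Path jar = java.nio.file.Files.createTempFile("plugin", ".jar");
        java.nio.file.Path link = java.nio.file.Files.createSymbolicLink(
                jar.resolveSibling("link-" + jar.getFileName()), jar);
        System.out.println(link.toUri());              // file URL of the symlink itself
        System.out.println(link.toRealPath().toUri()); // canonical URL of the real jar
    }
}
// -------------------------------------------------------------------------------------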
@ -75,21 +75,27 @@ public class RestTable {

        BytesStreamOutput bytesOut = channel.bytesOutput();
        UTF8StreamWriter out = new UTF8StreamWriter().setOutput(bytesOut);
        int lastHeader = headers.size() - 1;
        if (verbose) {
            for (int col = 0; col < headers.size(); col++) {
                DisplayHeader header = headers.get(col);
                pad(new Table.Cell(header.display, table.findHeaderByName(header.name)), width[col], request, out);
                boolean isLastColumn = col == lastHeader;
                pad(new Table.Cell(header.display, table.findHeaderByName(header.name)), width[col], request, out, isLastColumn);
                if (!isLastColumn) {
                    out.append(" ");
                }
            }
            out.append("\n");
        }

        for (int row = 0; row < table.getRows().size(); row++) {
            for (int col = 0; col < headers.size(); col++) {
                DisplayHeader header = headers.get(col);
                pad(table.getAsMap().get(header.name).get(row), width[col], request, out);
                boolean isLastColumn = col == lastHeader;
                pad(table.getAsMap().get(header.name).get(row), width[col], request, out, isLastColumn);
                if (!isLastColumn) {
                    out.append(" ");
                }
            }
            out.append("\n");
        }
        out.close();
@ -236,6 +242,10 @@ public class RestTable {
    }

    public static void pad(Table.Cell cell, int width, RestRequest request, UTF8StreamWriter out) throws IOException {
        pad(cell, width, request, out, false);
    }

    public static void pad(Table.Cell cell, int width, RestRequest request, UTF8StreamWriter out, boolean isLast) throws IOException {
        String sValue = renderValue(request, cell.value);
        int length = sValue == null ? 0 : sValue.length();
        byte leftOver = (byte) (width - length);
@ -254,11 +264,14 @@ public class RestTable {
            if (sValue != null) {
                out.append(sValue);
            }
            // Ignore the leftover padding spaces if the cell is in the last column.
            if (!isLast) {
                for (byte i = 0; i < leftOver; i++) {
                    out.append(" ");
                }
            }
        }
    }

    private static String renderValue(RestRequest request, Object value) {
        if (value == null) {

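// --- Editorial sketch, not part of this commit --------------------------------------
// The new pad(..., isLast) overload suppresses trailing fill after the final column, so
// _cat rows no longer end in padding spaces. A standalone model of the same rule:
class PadSketch {
    static String padCell(String value, int width, boolean isLast) {
        StringBuilder sb = new StringBuilder(value);
        if (!isLast) {                    // last column: emit the value only, no fill
            while (sb.length() < width) {
                sb.append(' ');
            }
        }
        return sb.toString();
    }
    // padCell("node1", 8, false) -> "node1   "    padCell("node1", 8, true) -> "node1"
}
// -------------------------------------------------------------------------------------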
@ -43,7 +43,7 @@ import java.util.Map;
/**
 *
 */
public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
public class StatsAggregator extends NumericMetricsAggregator.MultiValue {

    final ValuesSource.Numeric valuesSource;
    final ValueFormatter formatter;
@ -54,7 +54,7 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
    DoubleArray maxes;


    public StatsAggegator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
    public StatsAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter,
                          AggregationContext context,
                          Aggregator parent, List<PipelineAggregator> pipelineAggregators,
                          Map<String, Object> metaData) throws IOException {
@ -164,14 +164,14 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue {
        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            return new StatsAggegator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
            return new StatsAggregator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
                throws IOException {
            return new StatsAggegator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
            return new StatsAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData);
        }
    }

@ -34,6 +34,6 @@ public class StatsParser extends NumericValuesSourceMetricsAggregatorParser<Inte

    @Override
    protected AggregatorFactory createFactory(String aggregationName, ValuesSourceConfig<ValuesSource.Numeric> config) {
        return new StatsAggegator.Factory(aggregationName, config);
        return new StatsAggregator.Factory(aggregationName, config);
    }
}

@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight;

import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -35,7 +36,29 @@ import java.util.Objects;
 * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field}
 * and provides the common setters, equality, hashCode calculation and common serialization
 */
public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder> {
public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> {

    public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags");
    public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags");
    public static final ParseField FIELDS_FIELD = new ParseField("fields");
    public static final ParseField ORDER_FIELD = new ParseField("order");
    public static final ParseField TAGS_SCHEMA_FIELD = new ParseField("tags_schema");
    public static final ParseField HIGHLIGHT_FILTER_FIELD = new ParseField("highlight_filter");
    public static final ParseField FRAGMENT_SIZE_FIELD = new ParseField("fragment_size");
    public static final ParseField FRAGMENT_OFFSET_FIELD = new ParseField("fragment_offset");
    public static final ParseField NUMBER_OF_FRAGMENTS_FIELD = new ParseField("number_of_fragments");
    public static final ParseField ENCODER_FIELD = new ParseField("encoder");
    public static final ParseField REQUIRE_FIELD_MATCH_FIELD = new ParseField("require_field_match");
    public static final ParseField BOUNDARY_MAX_SCAN_FIELD = new ParseField("boundary_max_scan");
    public static final ParseField BOUNDARY_CHARS_FIELD = new ParseField("boundary_chars");
    public static final ParseField TYPE_FIELD = new ParseField("type");
    public static final ParseField FRAGMENTER_FIELD = new ParseField("fragmenter");
    public static final ParseField NO_MATCH_SIZE_FIELD = new ParseField("no_match_size");
    public static final ParseField FORCE_SOURCE_FIELD = new ParseField("force_source");
    public static final ParseField PHRASE_LIMIT_FIELD = new ParseField("phrase_limit");
    public static final ParseField OPTIONS_FIELD = new ParseField("options");
    public static final ParseField HIGHLIGHT_QUERY_FIELD = new ParseField("highlight_query");
    public static final ParseField MATCHED_FIELDS_FIELD = new ParseField("matched_fields");

    protected String[] preTags;

@ -49,7 +72,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB

    protected String fragmenter;

    protected QueryBuilder highlightQuery;
    protected QueryBuilder<?> highlightQuery;

    protected String order;

@ -175,7 +198,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
     * Sets a query to be used for highlighting instead of the search query.
     */
    @SuppressWarnings("unchecked")
    public HB highlightQuery(QueryBuilder highlightQuery) {
    public HB highlightQuery(QueryBuilder<?> highlightQuery) {
        this.highlightQuery = highlightQuery;
        return (HB) this;
    }
@ -183,7 +206,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
    /**
     * @return the value set by {@link #highlightQuery(QueryBuilder)}
     */
    public QueryBuilder highlightQuery() {
    public QueryBuilder<?> highlightQuery() {
        return this.highlightQuery;
    }

@ -347,52 +370,52 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB

    void commonOptionsToXContent(XContentBuilder builder) throws IOException {
        if (preTags != null) {
            builder.array("pre_tags", preTags);
            builder.array(PRE_TAGS_FIELD.getPreferredName(), preTags);
        }
        if (postTags != null) {
            builder.array("post_tags", postTags);
            builder.array(POST_TAGS_FIELD.getPreferredName(), postTags);
        }
        if (fragmentSize != null) {
            builder.field("fragment_size", fragmentSize);
            builder.field(FRAGMENT_SIZE_FIELD.getPreferredName(), fragmentSize);
        }
        if (numOfFragments != null) {
            builder.field("number_of_fragments", numOfFragments);
            builder.field(NUMBER_OF_FRAGMENTS_FIELD.getPreferredName(), numOfFragments);
        }
        if (highlighterType != null) {
            builder.field("type", highlighterType);
            builder.field(TYPE_FIELD.getPreferredName(), highlighterType);
        }
        if (fragmenter != null) {
            builder.field("fragmenter", fragmenter);
            builder.field(FRAGMENTER_FIELD.getPreferredName(), fragmenter);
        }
        if (highlightQuery != null) {
            builder.field("highlight_query", highlightQuery);
            builder.field(HIGHLIGHT_QUERY_FIELD.getPreferredName(), highlightQuery);
        }
        if (order != null) {
            builder.field("order", order);
            builder.field(ORDER_FIELD.getPreferredName(), order);
        }
        if (highlightFilter != null) {
            builder.field("highlight_filter", highlightFilter);
            builder.field(HIGHLIGHT_FILTER_FIELD.getPreferredName(), highlightFilter);
        }
        if (boundaryMaxScan != null) {
            builder.field("boundary_max_scan", boundaryMaxScan);
            builder.field(BOUNDARY_MAX_SCAN_FIELD.getPreferredName(), boundaryMaxScan);
        }
        if (boundaryChars != null) {
            builder.field("boundary_chars", boundaryChars);
            builder.field(BOUNDARY_CHARS_FIELD.getPreferredName(), new String(boundaryChars));
        }
        if (options != null && options.size() > 0) {
            builder.field("options", options);
            builder.field(OPTIONS_FIELD.getPreferredName(), options);
        }
        if (forceSource != null) {
            builder.field("force_source", forceSource);
            builder.field(FORCE_SOURCE_FIELD.getPreferredName(), forceSource);
        }
        if (requireFieldMatch != null) {
            builder.field("require_field_match", requireFieldMatch);
            builder.field(REQUIRE_FIELD_MATCH_FIELD.getPreferredName(), requireFieldMatch);
        }
        if (noMatchSize != null) {
            builder.field("no_match_size", noMatchSize);
            builder.field(NO_MATCH_SIZE_FIELD.getPreferredName(), noMatchSize);
        }
        if (phraseLimit != null) {
            builder.field("phrase_limit", phraseLimit);
            builder.field(PHRASE_LIMIT_FIELD.getPreferredName(), phraseLimit);
        }
    }

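// --- Editorial sketch, not part of this commit --------------------------------------
// The ParseField constants introduced above give each wire name a single definition that
// both commonOptionsToXContent() and the new parsers share, and they leave room for
// deprecated aliases to be registered in one place. Minimal illustration (the alias
// name here is invented):
class ParseFieldSketch {
    static final ParseField ORDER = new ParseField("order", "sort_order" /* hypothetical alias */);

    static String writeKey() {
        return ORDER.getPreferredName(); // always "order" when serializing
    }
}
// -------------------------------------------------------------------------------------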
@ -20,12 +20,15 @@
package org.elasticsearch.search.highlight;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;
import java.util.ArrayList;
@ -43,6 +46,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde

    public static final HighlightBuilder PROTOTYPE = new HighlightBuilder();

    public static final String HIGHLIGHT_ELEMENT_NAME = "highlight";

    private final List<Field> fields = new ArrayList<>();

    private String encoder;
@ -164,24 +169,140 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject("highlight");
        builder.startObject(HIGHLIGHT_ELEMENT_NAME);
        innerXContent(builder);
        builder.endObject();
        return builder;
    }

    /**
     * Creates a new {@link HighlightBuilder} from the highlighter held by the {@link QueryParseContext}
     * in {@link org.elasticsearch.common.xcontent.XContent} format
     *
     * @param parseContext
     *            the input parse context. The state on the parser contained in
     *            this context will be changed as a side effect of this method
     *            call
     * @return the new {@link HighlightBuilder}
     */
    public static HighlightBuilder fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        XContentParser.Token token;
        String topLevelFieldName = null;

        HighlightBuilder highlightBuilder = new HighlightBuilder();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                topLevelFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (parseContext.parseFieldMatcher().match(topLevelFieldName, PRE_TAGS_FIELD)) {
                    List<String> preTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        preTagsList.add(parser.text());
                    }
                    highlightBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()]));
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, POST_TAGS_FIELD)) {
                    List<String> postTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        postTagsList.add(parser.text());
                    }
                    highlightBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()]));
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FIELDS_FIELD)) {
                    highlightBuilder.useExplicitFieldOrder(true);
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        if (token == XContentParser.Token.START_OBJECT) {
                            String highlightFieldName = null;
                            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                                if (token == XContentParser.Token.FIELD_NAME) {
                                    if (highlightFieldName != null) {
                                        throw new ParsingException(parser.getTokenLocation(), "If highlighter fields is an array it must contain objects containing a single field");
                                    }
                                    highlightFieldName = parser.currentName();
                                } else if (token == XContentParser.Token.START_OBJECT) {
                                    highlightBuilder.field(Field.fromXContent(highlightFieldName, parseContext));
                                }
                            }
                        } else {
                            throw new ParsingException(parser.getTokenLocation(), "If highlighter fields is an array it must contain objects containing a single field");
                        }
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "cannot parse array with name [{}]", topLevelFieldName);
                }
            } else if (token.isValue()) {
                if (parseContext.parseFieldMatcher().match(topLevelFieldName, ORDER_FIELD)) {
                    highlightBuilder.order(parser.text());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, TAGS_SCHEMA_FIELD)) {
                    highlightBuilder.tagsSchema(parser.text());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, HIGHLIGHT_FILTER_FIELD)) {
                    highlightBuilder.highlightFilter(parser.booleanValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FRAGMENT_SIZE_FIELD)) {
                    highlightBuilder.fragmentSize(parser.intValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, NUMBER_OF_FRAGMENTS_FIELD)) {
                    highlightBuilder.numOfFragments(parser.intValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, ENCODER_FIELD)) {
                    highlightBuilder.encoder(parser.text());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, REQUIRE_FIELD_MATCH_FIELD)) {
                    highlightBuilder.requireFieldMatch(parser.booleanValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, BOUNDARY_MAX_SCAN_FIELD)) {
                    highlightBuilder.boundaryMaxScan(parser.intValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, BOUNDARY_CHARS_FIELD)) {
                    highlightBuilder.boundaryChars(parser.text().toCharArray());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, TYPE_FIELD)) {
                    highlightBuilder.highlighterType(parser.text());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FRAGMENTER_FIELD)) {
                    highlightBuilder.fragmenter(parser.text());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, NO_MATCH_SIZE_FIELD)) {
                    highlightBuilder.noMatchSize(parser.intValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FORCE_SOURCE_FIELD)) {
                    highlightBuilder.forceSource(parser.booleanValue());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, PHRASE_LIMIT_FIELD)) {
                    highlightBuilder.phraseLimit(parser.intValue());
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "unexpected fieldname [{}]", topLevelFieldName);
                }
            } else if (token == XContentParser.Token.START_OBJECT && topLevelFieldName != null) {
                if (parseContext.parseFieldMatcher().match(topLevelFieldName, OPTIONS_FIELD)) {
                    highlightBuilder.options(parser.map());
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FIELDS_FIELD)) {
                    String highlightFieldName = null;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            highlightFieldName = parser.currentName();
                        } else if (token == XContentParser.Token.START_OBJECT) {
                            highlightBuilder.field(Field.fromXContent(highlightFieldName, parseContext));
                        }
                    }
                } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, HIGHLIGHT_QUERY_FIELD)) {
                    highlightBuilder.highlightQuery(parseContext.parseInnerQueryBuilder());
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "cannot parse object with name [{}]", topLevelFieldName);
                }
            } else if (topLevelFieldName != null) {
                throw new ParsingException(parser.getTokenLocation(), "unexpected token [{}] after [{}]", token, topLevelFieldName);
            }
        }

        if (highlightBuilder.preTags() != null && highlightBuilder.postTags() == null) {
            throw new ParsingException(parser.getTokenLocation(), "Highlighter global preTags are set, but global postTags are not set");
        }
        return highlightBuilder;
    }

    public void innerXContent(XContentBuilder builder) throws IOException {
        // first write common options
        commonOptionsToXContent(builder);
        // special options for top-level highlighter
        if (encoder != null) {
            builder.field("encoder", encoder);
            builder.field(ENCODER_FIELD.getPreferredName(), encoder);
        }
        if (fields.size() > 0) {
            if (useExplicitFieldOrder) {
                builder.startArray("fields");
                builder.startArray(FIELDS_FIELD.getPreferredName());
            } else {
                builder.startObject("fields");
                builder.startObject(FIELDS_FIELD.getPreferredName());
            }
            for (Field field : fields) {
                if (useExplicitFieldOrder) {
@ -205,7 +326,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilde
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.prettyPrint();
            toXContent(builder, ToXContent.EMPTY_PARAMS);
            toXContent(builder, EMPTY_PARAMS);
            return builder.string();
        } catch (Exception e) {
            return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
@ -286,14 +407,90 @@ public class HighlightBuilder extends AbstractHighlighterBuilde
            commonOptionsToXContent(builder);
            // write special field-highlighter options
            if (fragmentOffset != -1) {
                builder.field("fragment_offset", fragmentOffset);
                builder.field(FRAGMENT_OFFSET_FIELD.getPreferredName(), fragmentOffset);
            }
            if (matchedFields != null) {
                builder.field("matched_fields", matchedFields);
                builder.field(MATCHED_FIELDS_FIELD.getPreferredName(), matchedFields);
            }
            builder.endObject();
        }

        private static HighlightBuilder.Field fromXContent(String fieldname, QueryParseContext parseContext) throws IOException {
            XContentParser parser = parseContext.parser();
            XContentParser.Token token;

            final HighlightBuilder.Field field = new HighlightBuilder.Field(fieldname);
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (parseContext.parseFieldMatcher().match(currentFieldName, PRE_TAGS_FIELD)) {
                        List<String> preTagsList = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            preTagsList.add(parser.text());
                        }
                        field.preTags(preTagsList.toArray(new String[preTagsList.size()]));
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, POST_TAGS_FIELD)) {
                        List<String> postTagsList = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            postTagsList.add(parser.text());
                        }
                        field.postTags(postTagsList.toArray(new String[postTagsList.size()]));
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, MATCHED_FIELDS_FIELD)) {
                        List<String> matchedFields = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            matchedFields.add(parser.text());
                        }
                        field.matchedFields(matchedFields.toArray(new String[matchedFields.size()]));
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "cannot parse array with name [{}]", currentFieldName);
                    }
                } else if (token.isValue()) {
                    if (parseContext.parseFieldMatcher().match(currentFieldName, FRAGMENT_SIZE_FIELD)) {
                        field.fragmentSize(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, NUMBER_OF_FRAGMENTS_FIELD)) {
                        field.numOfFragments(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, FRAGMENT_OFFSET_FIELD)) {
                        field.fragmentOffset(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_FILTER_FIELD)) {
                        field.highlightFilter(parser.booleanValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, ORDER_FIELD)) {
                        field.order(parser.text());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, REQUIRE_FIELD_MATCH_FIELD)) {
                        field.requireFieldMatch(parser.booleanValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, BOUNDARY_MAX_SCAN_FIELD)) {
                        field.boundaryMaxScan(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, BOUNDARY_CHARS_FIELD)) {
                        field.boundaryChars(parser.text().toCharArray());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
                        field.highlighterType(parser.text());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, FRAGMENTER_FIELD)) {
                        field.fragmenter(parser.text());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_SIZE_FIELD)) {
                        field.noMatchSize(parser.intValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, FORCE_SOURCE_FIELD)) {
                        field.forceSource(parser.booleanValue());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, PHRASE_LIMIT_FIELD)) {
                        field.phraseLimit(parser.intValue());
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "unexpected fieldname [{}]", currentFieldName);
                    }
                } else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
                    if (parseContext.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_QUERY_FIELD)) {
                        field.highlightQuery(parseContext.parseInnerQueryBuilder());
                    } else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIONS_FIELD)) {
                        field.options(parser.map());
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "cannot parse object with name [{}]", currentFieldName);
                    }
                } else if (currentFieldName != null) {
                    throw new ParsingException(parser.getTokenLocation(), "unexpected token [{}] after [{}]", token, currentFieldName);
                }
            }
            return field;
        }

        @Override
        protected int doHashCode() {
            return Objects.hash(name, fragmentOffset, Arrays.hashCode(matchedFields));

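// --- Editorial sketch, not part of this commit --------------------------------------
// What the new fromXContent enables: a request-body fragment such as
//   "highlight": { "pre_tags": ["<em>"], "post_tags": ["</em>"],
//                  "fields": { "title": { "fragment_size": 100 } } }
// can be parsed into a HighlightBuilder and serialized back to the same shape. Sketch,
// assuming a QueryParseContext already positioned inside the "highlight" object:
class HighlightRoundTripSketch {
    static String roundTrip(QueryParseContext parseContext) throws IOException {
        HighlightBuilder parsed = HighlightBuilder.fromXContent(parseContext);
        XContentBuilder out = XContentFactory.jsonBuilder();
        parsed.toXContent(out, ToXContent.EMPTY_PARAMS);
        return out.string(); // the "highlight" object, re-emitted from the parsed builder
    }
}
// -------------------------------------------------------------------------------------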
@ -211,7 +211,7 @@ public class HighlighterParseElement implements SearchParseElement {
        return new SearchContextHighlight(fields);
    }

    protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
    private static SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
        XContentParser.Token token;

        final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();

@ -0,0 +1,37 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.tribe;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;

import java.util.Collections;

/**
 * An internal node that connects to a remote cluster, as part of a tribe node.
 */
class TribeClientNode extends Node {
    TribeClientNode(Settings settings) {
        super(new Environment(settings), Version.CURRENT, Collections.<Class<? extends Plugin>>emptyList());
    }
}

@ -132,14 +132,14 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
        nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client
        for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
            Settings.Builder sb = Settings.builder().put(entry.getValue());
            sb.put("node.name", settings.get("name") + "/" + entry.getKey());
            sb.put("name", settings.get("name") + "/" + entry.getKey());
            sb.put("path.home", settings.get("path.home")); // pass through ES home dir
            sb.put(TRIBE_NAME, entry.getKey());
            sb.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true);
            if (sb.get("http.enabled") == null) {
                sb.put("http.enabled", false);
            }
            nodes.add(NodeBuilder.nodeBuilder().settings(sb).client(true).build());
            sb.put("node.client", true);
            nodes.add(new TribeClientNode(sb.build()));
        }

        String[] blockIndicesWrite = Strings.EMPTY_ARRAY;

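// --- Editorial sketch, not part of this commit --------------------------------------
// The loop above derives one TribeClientNode per "tribe.<name>.*" settings group.
// Hypothetical user configuration (names and paths invented) and what the loop makes
// of it:
class TribeSettingsSketch {
    static Settings example() {
        return Settings.builder()
                .put("name", "tribe1")                       // parent tribe node name
                .put("path.home", "/tmp/es-home")            // invented path
                .put("tribe.t1.cluster.name", "cluster-one") // inner client "t1"
                .put("tribe.t2.cluster.name", "cluster-two") // inner client "t2"
                .build();
        // For entry "t1" the derived client settings are roughly: name = "tribe1/t1",
        // tribe.name = "t1", http.enabled = false (unless overridden), node.client = true,
        // which the loop wraps in new TribeClientNode(...).
    }
}
// -------------------------------------------------------------------------------------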
@ -99,12 +99,18 @@ public class ShardRoutingTests extends ESTestCase {
        ShardRouting initializingShard0 = TestShardRouting.newShardRouting("test", 0, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1);
        ShardRouting initializingShard1 = TestShardRouting.newShardRouting("test", 1, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1);
        ShardRouting startedShard0 = new ShardRouting(initializingShard0);
        assertFalse(startedShard0.isRelocationTarget());
        startedShard0.moveToStarted();
        assertFalse(startedShard0.isRelocationTarget());
        ShardRouting startedShard1 = new ShardRouting(initializingShard1);
        assertFalse(startedShard1.isRelocationTarget());
        startedShard1.moveToStarted();
        assertFalse(startedShard1.isRelocationTarget());
        ShardRouting sourceShard0a = new ShardRouting(startedShard0);
        sourceShard0a.relocate("node2", -1);
        assertFalse(sourceShard0a.isRelocationTarget());
        ShardRouting targetShard0a = sourceShard0a.buildTargetRelocatingShard();
        assertTrue(targetShard0a.isRelocationTarget());
        ShardRouting sourceShard0b = new ShardRouting(startedShard0);
        sourceShard0b.relocate("node2", -1);
        ShardRouting sourceShard1 = new ShardRouting(startedShard1);

@ -67,7 +67,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
    }

    public void testString() {
        createIndex("test", Settings.EMPTY, "field", "value", "type=string");
        createIndex("test", Settings.EMPTY, "test", "field", "type=string");
        for (int value = 0; value <= 10; value++) {
            client().prepareIndex("test", "test").setSource("field", String.format(Locale.ENGLISH, "%03d", value)).get();
        }
@ -85,7 +85,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

    public void testDouble() {
        String fieldName = "field";
        createIndex("test", Settings.EMPTY, fieldName, "value", "type=double");
        createIndex("test", Settings.EMPTY, "test", fieldName, "type=double");
        for (double value = -1; value <= 9; value++) {
            client().prepareIndex("test", "test").setSource(fieldName, value).get();
        }
@ -102,7 +102,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

    public void testFloat() {
        String fieldName = "field";
        createIndex("test", Settings.EMPTY, fieldName, "value", "type=float");
        createIndex("test", Settings.EMPTY, "test", fieldName, "type=float");
        for (float value = -1; value <= 9; value++) {
            client().prepareIndex("test", "test").setSource(fieldName, value).get();
        }
@ -112,14 +112,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
        assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
        assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
        assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1.0));
        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9.0));
        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1f));
        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9f));
        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1)));
        assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9)));
    }

    private void testNumberRange(String fieldName, String fieldType, long min, long max) {
        createIndex("test", Settings.EMPTY, fieldName, "value", "type=" + fieldType);
        createIndex("test", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
        for (long value = min; value <= max; value++) {
            client().prepareIndex("test", "test").setSource(fieldName, value).get();
        }
@ -180,11 +180,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
    }

    public void testInvalidField() {
        createIndex("test1", Settings.EMPTY, "field1", "value", "type=string");
        createIndex("test1", Settings.EMPTY, "test", "field1", "type=string");
        client().prepareIndex("test1", "test").setSource("field1", "a").get();
        client().prepareIndex("test1", "test").setSource("field1", "b").get();

        createIndex("test2", Settings.EMPTY, "field2", "value", "type=string");
        createIndex("test2", Settings.EMPTY, "test", "field2", "type=string");
        client().prepareIndex("test2", "test").setSource("field2", "a").get();
        client().prepareIndex("test2", "test").setSource("field2", "b").get();
        client().admin().indices().prepareRefresh().get();

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -28,15 +29,21 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;

public class DynamicMappingTests extends ESSingleNodeTestCase {
@ -407,4 +414,26 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
            // expected
        }
    }

    public void testDefaultFloatingPointMappings() throws IOException {
        DocumentMapper mapper = createIndex("test").mapperService().documentMapperWithAutoCreate("type").getDocumentMapper();
        doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder());
        doTestDefaultFloatingPointMappings(mapper, XContentFactory.yamlBuilder());
        doTestDefaultFloatingPointMappings(mapper, XContentFactory.smileBuilder());
        doTestDefaultFloatingPointMappings(mapper, XContentFactory.cborBuilder());
    }

    private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder) throws IOException {
        BytesReference source = builder.startObject()
                .field("foo", 3.2f) // float
                .field("bar", 3.2d) // double
                .field("baz", (double) 3.2f) // double that can be accurately represented as a float
                .endObject().bytes();
        ParsedDocument parsedDocument = mapper.parse("index", "type", "id", source);
        Mapping update = parsedDocument.dynamicMappingsUpdate();
        assertNotNull(update);
        assertThat(update.root().getMapper("foo"), instanceOf(FloatFieldMapper.class));
        assertThat(update.root().getMapper("bar"), instanceOf(FloatFieldMapper.class));
        assertThat(update.root().getMapper("baz"), instanceOf(FloatFieldMapper.class));
    }
}

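// --- Editorial sketch, not part of this commit --------------------------------------
// Background for the "baz" case above: 3.2f widened to double still round-trips through
// float exactly, while a literal 3.2 double does not, which is why the test can label
// (double) 3.2f as "accurately representable as a float". Standalone check:
class FloatRepresentableSketch {
    public static void main(String[] args) {
        double widened = (double) 3.2f;
        System.out.println((double) (float) widened == widened); // true  -> float-safe
        System.out.println((double) (float) 3.2d == 3.2d);       // false -> needs a double
    }
}
// -------------------------------------------------------------------------------------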
@ -281,7 +281,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
    public void testCheckTypeName() {
        final MappedFieldType fieldType = createNamedDefaultFieldType();
        List<String> conflicts = new ArrayList<>();
        fieldType.checkTypeName(fieldType, conflicts);
        fieldType.checkCompatibility(fieldType, conflicts, random().nextBoolean()); // no exception
        assertTrue(conflicts.toString(), conflicts.isEmpty());

        MappedFieldType bogus = new MappedFieldType() {
@ -291,7 +291,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
            public String typeName() { return fieldType.typeName();}
        };
        try {
            fieldType.checkTypeName(bogus, conflicts);
            fieldType.checkCompatibility(bogus, conflicts, random().nextBoolean());
            fail("expected bad types exception");
        } catch (IllegalStateException e) {
            assertTrue(e.getMessage().contains("Type names equal"));
@ -304,10 +304,13 @@ public abstract class FieldTypeTestCase extends ESTestCase {
            @Override
            public String typeName() { return "othertype";}
        };
        fieldType.checkTypeName(other, conflicts);
        assertFalse(conflicts.isEmpty());
        assertTrue(conflicts.get(0).contains("cannot be changed from type"));
        assertEquals(1, conflicts.size());
        try {
            fieldType.checkCompatibility(other, conflicts, random().nextBoolean());
            fail();
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage(), e.getMessage().contains("cannot be changed from type"));
        }
        assertTrue(conflicts.toString(), conflicts.isEmpty());
    }

    public void testCheckCompatibility() {

@ -0,0 +1,105 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */


package org.elasticsearch.index.mapper.core;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.core.IsEqual.equalTo;

public class MultiFieldCopyToMapperTests extends ESTestCase {

    public void testExceptionForCopyToInMultiFields() throws IOException {
        XContentBuilder mapping = createMappingWithCopyToInMultiField();
        Tuple<List<Version>, List<Version>> versionsWithAndWithoutExpectedExceptions = versionsWithAndWithoutExpectedExceptions();

        // first check that for newer versions we throw an exception if a copy_to is found within a multi field
        Version indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v1());
        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build());
        try {
            mapperService.parse("type", new CompressedXContent(mapping.string()), true);
            fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field");
        } catch (MapperParsingException e) {
            assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field."));
        }

        // now test that with an older version the parsing just works
        indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v2());
        mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build());
        DocumentMapper documentMapper = mapperService.parse("type", new CompressedXContent(mapping.string()), true);
        assertFalse(documentMapper.mapping().toString().contains("copy_to"));
    }

    private static XContentBuilder createMappingWithCopyToInMultiField() throws IOException {
        XContentBuilder mapping = jsonBuilder();
        mapping.startObject()
                .startObject("type")
                    .startObject("properties")
                        .startObject("a")
                            .field("type", "string")
                        .endObject()
                        .startObject("b")
                            .field("type", "string")
                            .startObject("fields")
                                .startObject("c")
                                    .field("type", "string")
                                    .field("copy_to", "a")
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
            .endObject();
        return mapping;
    }

    // returns a tuple where
    // v1 is a list of versions for which we expect an exception when a copy_to in multi fields is found and
    // v2 is older versions where we throw no exception and we just log a warning
    private static Tuple<List<Version>, List<Version>> versionsWithAndWithoutExpectedExceptions() {
        List<Version> versionsWithException = new ArrayList<>();
        List<Version> versionsWithoutException = new ArrayList<>();
        for (Version version : VersionUtils.allVersions()) {
            if (version.after(Version.V_2_1_0) ||
                (version.after(Version.V_2_0_1) && version.before(Version.V_2_1_0))) {
                versionsWithException.add(version);
            } else {
                versionsWithoutException.add(version);
            }
        }
        return new Tuple<>(versionsWithException, versionsWithoutException);
    }
}

@ -24,6 +24,8 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
@ -41,9 +43,11 @@ import org.elasticsearch.index.mapper.string.SimpleStringMappingTests;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
import java.util.Arrays;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -510,4 +514,62 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
        assertThat(ts, instanceOf(NumericTokenStream.class));
        assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep());
    }

    public void testTermVectorsBackCompat() throws Exception {
        for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
            doTestTermVectorsBackCompat(type);
        }
    }

    private void doTestTermVectorsBackCompat(String type) throws Exception {
        DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
        String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("foo")
                        .field("type", type)
                        .field("term_vector", "yes")
                    .endObject()
                .endObject().endObject().endObject().string();
        try {
            parser.parse(mappingWithTV);
            fail();
        } catch (MapperParsingException e) {
            assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters:  [term_vector : yes]"));
        }

        Settings oldIndexSettings = Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
                .build();
        parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
        parser.parse(mappingWithTV); // no exception
    }

    public void testAnalyzerBackCompat() throws Exception {
        for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) {
            doTestAnalyzerBackCompat(type);
        }
    }

    private void doTestAnalyzerBackCompat(String type) throws Exception {
        DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser();
        String mappingWithAnalyzer = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("foo")
                        .field("type", type)
                        .field("analyzer", "keyword")
                    .endObject()
                .endObject().endObject().endObject().string();
        try {
            parser.parse(mappingWithAnalyzer);
            fail();
        } catch (MapperParsingException e) {
            assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters:  [analyzer : keyword]"));
        }

        Settings oldIndexSettings = Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0)
                .build();
        parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser();
        parser.parse(mappingWithAnalyzer); // no exception
    }
}

@ -151,7 +151,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
            fail();
        } catch (IllegalArgumentException e) {
            // expected
            assertTrue(e.getMessage().contains("conflicts with existing mapping in other types"));
            assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
        }

        try {
@ -159,7 +159,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
            fail();
        } catch (IllegalArgumentException e) {
            // expected
            assertTrue(e.getMessage().contains("conflicts with existing mapping in other types"));
            assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
        }

        assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper);
@ -186,7 +186,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
            fail();
        } catch (IllegalArgumentException e) {
            // expected
            assertTrue(e.getMessage().contains("conflicts with existing mapping in other types"));
            assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
        }

        try {
@ -194,7 +194,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
            fail();
        } catch (IllegalArgumentException e) {
            // expected
            assertTrue(e.getMessage().contains("conflicts with existing mapping in other types"));
            assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
        }

        assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper);

@ -19,14 +19,29 @@

package org.elasticsearch.search.highlight;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.IdsQueryParser;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermQueryParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.highlight.HighlightBuilder.Field;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
@ -35,8 +50,10 @@ import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -45,23 +62,26 @@ public class HighlightBuilderTests extends ESTestCase {

    private static final int NUMBER_OF_TESTBUILDERS = 20;
    private static NamedWriteableRegistry namedWriteableRegistry;
    private static IndicesQueriesRegistry indicesQueriesRegistry;

    /**
     * setup for the whole base test class
     */
    @BeforeClass
    public static void init() {
        if (namedWriteableRegistry == null) {
            namedWriteableRegistry = new NamedWriteableRegistry();
            namedWriteableRegistry.registerPrototype(QueryBuilder.class, new MatchAllQueryBuilder());
            namedWriteableRegistry.registerPrototype(QueryBuilder.class, new IdsQueryBuilder());
            namedWriteableRegistry.registerPrototype(QueryBuilder.class, new TermQueryBuilder("field", "value"));
        }
        @SuppressWarnings("rawtypes")
        Set<QueryParser> injectedQueryParsers = new HashSet<>();
        injectedQueryParsers.add(new MatchAllQueryParser());
        injectedQueryParsers.add(new IdsQueryParser());
        injectedQueryParsers.add(new TermQueryParser());
        indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry);
    }

    @AfterClass
    public static void afterClass() throws Exception {
        namedWriteableRegistry = null;
        indicesQueriesRegistry = null;
    }

    /**
@ -107,6 +127,196 @@ public class HighlightBuilderTests extends ESTestCase {
        }
    }

    /**
     * Generic test that creates a new highlighter from the test highlighter and checks both for equality
     */
    public void testFromXContent() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
            HighlightBuilder highlightBuilder = randomHighlighterBuilder();
            XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
            if (randomBoolean()) {
                builder.prettyPrint();
            }
            builder.startObject();
            highlightBuilder.innerXContent(builder);
            builder.endObject();

            XContentParser parser = XContentHelper.createParser(builder.bytes());
            context.reset(parser);
            HighlightBuilder secondHighlightBuilder = HighlightBuilder.fromXContent(context);
            assertNotSame(highlightBuilder, secondHighlightBuilder);
            assertEquals(highlightBuilder, secondHighlightBuilder);
            assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode());
        }
    }

    /**
     * test that unknown array fields cause an exception
     */
    public void testUnknownArrayNameException() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String highlightElement = "{\n" +
                " \"bad_fieldname\" : [ \"field1\" 1 \"field2\" ]\n" +
                "}\n";
        XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("cannot parse array with name [bad_fieldname]", e.getMessage());
        }

        highlightElement = "{\n" +
                " \"fields\" : {\n" +
                " \"body\" : {\n" +
                " \"bad_fieldname\" : [ \"field1\" , \"field2\" ]\n" +
                " }\n" +
                " }\n" +
                "}\n";
        parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("cannot parse array with name [bad_fieldname]", e.getMessage());
        }
    }

    /**
     * test that an unknown field name causes an exception
     */
    public void testUnknownFieldnameException() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String highlightElement = "{\n" +
                " \"bad_fieldname\" : \"value\"\n" +
                "}\n";
        XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("unexpected fieldname [bad_fieldname]", e.getMessage());
        }

        highlightElement = "{\n" +
                " \"fields\" : {\n" +
                " \"body\" : {\n" +
                " \"bad_fieldname\" : \"value\"\n" +
                " }\n" +
                " }\n" +
                "}\n";
        parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("unexpected fieldname [bad_fieldname]", e.getMessage());
        }
    }

    /**
     * test that an unknown object field name causes an exception
     */
    public void testUnknownObjectFieldnameException() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String highlightElement = "{\n" +
                " \"bad_fieldname\" : { \"field\" : \"value\" }\n" +
                "}\n";
        XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("cannot parse object with name [bad_fieldname]", e.getMessage());
        }

        highlightElement = "{\n" +
                " \"fields\" : {\n" +
                " \"body\" : {\n" +
                " \"bad_fieldname\" : { \"field\" : \"value\" }\n" +
                " }\n" +
                " }\n" +
                "}\n";
        parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            HighlightBuilder.fromXContent(context);
            fail("expected a parsing exception");
        } catch (ParsingException e) {
            assertEquals("cannot parse object with name [bad_fieldname]", e.getMessage());
        }
    }

    /**
     * `tags_schema` is not produced by toXContent in the builder but should be parseable, so this
     * adds a simple JSON test for it.
     */
    public void testParsingTagsSchema() throws IOException {
        QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
        context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
        String highlightElement = "{\n" +
                " \"tags_schema\" : \"styled\"\n" +
                "}\n";
        XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context);
        assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlighterParseElement.STYLED_PRE_TAG,
                highlightBuilder.preTags());
        assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlighterParseElement.STYLED_POST_TAGS,
                highlightBuilder.postTags());

        highlightElement = "{\n" +
                " \"tags_schema\" : \"default\"\n" +
                "}\n";
        parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        highlightBuilder = HighlightBuilder.fromXContent(context);
        assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlighterParseElement.DEFAULT_PRE_TAGS,
                highlightBuilder.preTags());
        assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlighterParseElement.DEFAULT_POST_TAGS,
                highlightBuilder.postTags());

        highlightElement = "{\n" +
                " \"tags_schema\" : \"something_else\"\n" +
                "}\n";
        parser = XContentFactory.xContent(highlightElement).createParser(highlightElement);

        context.reset(parser);
        try {
            highlightBuilder = HighlightBuilder.fromXContent(context);
            fail("setting an unknown tag schema should throw an exception");
        } catch (IllegalArgumentException e) {
            assertEquals("Unknown tag schema [something_else]", e.getMessage());
        }
    }

    protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException {
        XContentBuilder builder = XContentFactory.contentBuilder(contentType);
        if (randomBoolean()) {
            builder.prettyPrint();
        }
        highlight.toXContent(builder, ToXContent.EMPTY_PARAMS);
        return builder;
    }

    /**
     * create a random highlight builder that is put under test
     */
@ -132,11 +342,11 @@ public class HighlightBuilderTests extends ESTestCase {
        return testHighlighter;
    }

    @SuppressWarnings({ "rawtypes", "unchecked" })
    private static void setRandomCommonOptions(AbstractHighlighterBuilder highlightBuilder) {
        if (randomBoolean()) {
            // need to set pre and post tags together, otherwise parsing will complain
            highlightBuilder.preTags(randomStringArray(0, 3));
        }
        if (randomBoolean()) {
            highlightBuilder.postTags(randomStringArray(0, 3));
        }
        if (randomBoolean()) {
@ -213,7 +423,7 @@ public class HighlightBuilderTests extends ESTestCase {
        }
    }

    @SuppressWarnings("unchecked")
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static void mutateCommonOptions(AbstractHighlighterBuilder highlightBuilder) {
        switch (randomIntBetween(1, 16)) {
        case 1:
@ -242,6 +452,7 @@ public class HighlightBuilderTests extends ESTestCase {
            break;
        case 9:
            highlightBuilder.highlightFilter(toggleOrSet(highlightBuilder.highlightFilter()));
            break;
        case 10:
            highlightBuilder.forceSource(toggleOrSet(highlightBuilder.forceSource()));
            break;
@ -316,6 +527,7 @@ public class HighlightBuilderTests extends ESTestCase {
                        fieldToChange.matchedFields(randomStringArray(5, 10));
                    }
                }
                break;
            }
        }
        return mutation;

@ -429,3 +429,11 @@ to use the old default of 0. This was done to prevent phrase queries from
matching across different values of the same field unexpectedly. Specifically,
100 was chosen to cause phrase queries with slops up to 99 to match only within
a single value of a field.
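
For illustration only, a minimal sketch of the effect in Java test style. It assumes
an integration-test style `client()` and an index `test` whose document holds
`"tags": ["quick brown", "lazy dog"]`; all names here are invented:

[source,java]
----
// With a position_increment_gap of 100, consecutive values of a multi-valued
// field sit 100 positions apart, so a phrase query with any slop below 100
// cannot match across two values.
SearchResponse response = client().prepareSearch("test")
        .setQuery(QueryBuilders.matchPhraseQuery("tags", "brown lazy").slop(99))
        .get();
assertEquals(0L, response.getHits().getTotalHits()); // 99 < 100: no cross-value match
----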

==== copy_to and multi fields

A <<copy-to,copy_to>> within a <<multi-fields,multi field>> is ignored from version 2.0 on. On any version after
2.0.1 or 2.1.0, creating a mapping that has a copy_to within a multi field will result
in an exception.
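
The sketch below, written in the style of the mapper tests elsewhere in this commit,
shows the new failure mode; the index and field names are invented:

[source,java]
----
// A copy_to inside a multi field is now rejected when the mapping is parsed.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties")
            .startObject("foo")
                .field("type", "string")
                .startObject("fields")
                    .startObject("raw")
                        .field("type", "string")
                        .field("copy_to", "bar") // illegal inside a multi field
                    .endObject()
                .endObject()
            .endObject()
        .endObject().endObject().endObject().string();
try {
    createIndex("copy-to-check").mapperService().documentMapperParser().parse(mapping);
    fail("expected copy_to inside a multi field to be rejected");
} catch (MapperParsingException e) {
    // expected on the versions described above
}
----
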
@ -183,6 +183,22 @@ Previously, there were three settings for the ping timeout: `discovery.zen.initi
the only setting key for the ping timeout is now `discovery.zen.ping_timeout`. The default value for
ping timeouts remains at three seconds.
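
A minimal sketch of the consolidated key (the node settings here are illustrative;
the value shown is just the unchanged three second default):

[source,java]
----
Settings settings = Settings.builder()
        .put("discovery.zen.ping_timeout", "3s") // the single remaining key
        .build();
----
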
==== Recovery settings

Recovery settings deprecated in 1.x have been removed:

 * `index.shard.recovery.translog_size` is superseded by `indices.recovery.translog_size`
 * `index.shard.recovery.translog_ops` is superseded by `indices.recovery.translog_ops`
 * `index.shard.recovery.file_chunk_size` is superseded by `indices.recovery.file_chunk_size`
 * `index.shard.recovery.concurrent_streams` is superseded by `indices.recovery.concurrent_streams`
 * `index.shard.recovery.concurrent_small_file_streams` is superseded by `indices.recovery.concurrent_small_file_streams`
 * `indices.recovery.max_size_per_sec` is superseded by `indices.recovery.max_bytes_per_sec`

If you are using any of these settings, please take the time to review their purpose. All of the settings above are considered
_expert settings_ and should only be used if absolutely necessary. If you have set any of the above settings as persistent
cluster settings, please use the settings update API and move them to their superseding keys, as shown below.
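
A hedged sketch of such an update, assuming an integration-test style `client()`;
the value is illustrative:

[source,java]
----
// Re-apply a deprecated persistent recovery setting under its new key.
// The old key is cleared the same way, through the update settings API.
client().admin().cluster().prepareUpdateSettings()
        .setPersistentSettings(Settings.builder()
                .put("indices.recovery.max_bytes_per_sec", "40mb"))
        .get();
----
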
[[breaking_30_mapping_changes]]
=== Mapping changes

@ -190,6 +206,13 @@ ping timeouts remains at three seconds.

The `transform` feature has been removed from mappings. It made issues very hard to debug.

==== Default number mappings

When a floating-point number is encountered, it is now dynamically mapped as a
float by default instead of a double. The reasoning is that floats should be
more than enough for most use cases while decreasing storage requirements
significantly.
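
A small sketch of the new default (index, type and field names are invented;
assumes an integration-test style `client()`):

[source,java]
----
client().prepareIndex("prices", "doc", "1")
        .setSource(XContentFactory.jsonBuilder().startObject()
                .field("price", 9.99) // dynamically mapped as float, not double
                .endObject())
        .get();
----
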
[[breaking_30_plugins]]
=== Plugin changes

@ -22,6 +22,7 @@ package org.elasticsearch.mapper.attachments;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@ -29,6 +30,12 @@ public class AttachmentUnitTestCase extends ESTestCase {

    protected Settings testSettings;

    protected static IndicesModule getIndicesModuleWithRegisteredAttachmentMapper() {
        IndicesModule indicesModule = new IndicesModule();
        indicesModule.registerMapper(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
        return indicesModule;
    }

    @Before
    public void createSettings() throws Exception {
        testSettings = Settings.builder()
@ -20,6 +20,7 @@
package org.elasticsearch.mapper.attachments;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
@ -37,7 +38,7 @@ public class DateAttachmentMapperTests extends AttachmentUnitTestCase {

    @Before
    public void setupMapperParser() throws Exception {
        mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
    }

    public void testSimpleMappings() throws Exception {
@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.mapper.attachments.AttachmentMapper;

import java.io.IOException;

@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.*;
public class EncryptedDocMapperTests extends AttachmentUnitTestCase {

    public void testMultipleDocsEncryptedLast() throws IOException {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();

        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
@ -72,7 +72,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase {
    }

    public void testMultipleDocsEncryptedFirst() throws IOException {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
@ -103,9 +103,8 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase {
    public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException {
        try {
            DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
                Settings.builder()
                    .put("index.mapping.attachment.ignore_errors", false)
                    .build()).documentMapperParser();
                Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(),
                getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();

            String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
            DocumentMapper docMapper = mapperParser.parse(mapping);
@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.mapper.attachments.AttachmentMapper;
import org.junit.Before;

import java.io.IOException;
@ -50,9 +50,8 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa

    public void setupMapperParser(boolean langDetect) throws IOException {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
            Settings.settingsBuilder()
                .put("index.mapping.attachment.detect_language", langDetect)
                .build()).documentMapperParser();
            Settings.settingsBuilder().put("index.mapping.attachment.detect_language", langDetect).build(),
            getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/language/language-mapping.json");
        docMapper = mapperParser.parse(mapping);

@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.mapper.attachments.AttachmentMapper;

import java.io.IOException;

@ -44,7 +44,7 @@ public class MetadataMapperTests extends AttachmentUnitTestCase {
            .put(this.testSettings)
            .put(otherSettings)
            .build();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();

        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/metadata/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
@ -22,13 +22,13 @@ package org.elasticsearch.mapper.attachments;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.mapper.attachments.AttachmentMapper;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
@ -48,7 +48,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {

    @Before
    public void setupMapperParser() throws Exception {
        mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();

    }

@ -91,7 +91,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
        String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1));
        threadPool = new ThreadPool("testing-only");

        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY);
        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper());

        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json");

@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
@ -42,7 +43,7 @@ import static org.hamcrest.Matchers.*;
public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {

    public void testSimpleMappings() throws Exception {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html");
@ -69,9 +70,8 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {

    public void testContentBackcompat() throws Exception {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
            Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
                .build()).documentMapperParser();
            Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(),
            getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html");
@ -86,7 +86,7 @@
     * test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/179
     */
    public void testSimpleMappingsWithAllFields() throws Exception {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html");
@ -131,7 +131,7 @@
            .endObject();

        byte[] mapping = mappingBuilder.bytes().toBytes();
        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY);
        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper());
        DocumentMapper docMapper = mapperService.parse("mail", new CompressedXContent(mapping), true);
        // this should not throw an exception
        mapperService.parse("mail", new CompressedXContent(docMapper.mapping().toString()), true);
@ -30,10 +30,10 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.mapper.attachments.AttachmentMapper;

import java.io.FileNotFoundException;
import java.io.IOException;
@ -46,6 +46,7 @@ import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
import static org.elasticsearch.common.cli.CliToolConfig.Builder.option;
import static org.elasticsearch.common.io.Streams.copy;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.mapper.attachments.AttachmentUnitTestCase.getIndicesModuleWithRegisteredAttachmentMapper;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;

/**
@ -88,7 +89,7 @@ public class StandaloneRunner extends CliTool {
            this.size = size;
            this.url = url;
            this.base64text = base64text;
            DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY).documentMapperParser(); // use CWD b/c it won't be used
            DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); // use CWD b/c it won't be used

            String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json");
            docMapper = mapperParser.parse(mapping);
@ -23,10 +23,10 @@ import org.apache.tika.io.IOUtils;
import org.apache.tika.metadata.Metadata;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.MapperTestUtils;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.mapper.attachments.AttachmentMapper;
import org.junit.Before;

import java.io.IOException;
@ -48,7 +48,7 @@ public class VariousDocTests extends AttachmentUnitTestCase {

    @Before
    public void createMapper() throws IOException {
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
        DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser();

        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json");
        docMapper = mapperParser.parse(mapping);
@ -54,13 +54,12 @@ public class TribeUnitTests extends ESTestCase {
    @BeforeClass
    public static void createTribes() {
        Settings baseSettings = Settings.builder()
            .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
            .put("http.enabled", false)
            .put("node.mode", NODE_MODE)
            .put("path.home", createTempDir()).build();

        tribe1 = NodeBuilder.nodeBuilder().settings(Settings.builder().put(baseSettings).put("cluster.name", "tribe1").put("node.name", "tribe1_node")).node();
        tribe2 = NodeBuilder.nodeBuilder().settings(Settings.builder().put(baseSettings).put("cluster.name", "tribe2").put("node.name", "tribe2_node")).node();
        tribe1 = new TribeClientNode(Settings.builder().put(baseSettings).put("cluster.name", "tribe1").put("name", "tribe1_node").build()).start();
        tribe2 = new TribeClientNode(Settings.builder().put(baseSettings).put("cluster.name", "tribe2").put("name", "tribe2_node").build()).start();
    }

    @AfterClass
@ -148,7 +148,7 @@
        index \s+
        filter \s+
        routing.index \s+
        routing.search \s+
        routing.search
        \n
        test_1 \s+
        test \s+
@ -185,6 +185,6 @@
  - match:
      $body: |
        /^
        index \s+ alias \s+ \n
        test \s+ test_1 \s+ \n
        index \s+ alias \n
        test \s+ test_1 \n
        $/

@ -71,7 +71,7 @@
        (
          \s* #allow leading spaces to account for right-justified text
          \d+ \s+
          UNASSIGNED \s+
          UNASSIGNED
          \n
        )?
        $/
@ -134,7 +134,7 @@
        (
          \s* #allow leading spaces to account for right-justified text
          \d+ \s+
          UNASSIGNED \s+
          UNASSIGNED
          \n
        )?
        $/
@ -156,7 +156,7 @@
        disk.percent \s+
        host \s+
        ip \s+
        node \s+
        node
        \n

        ( \s* #allow leading spaces to account for right-justified text
@ -199,7 +199,7 @@
      $body: |
        /^
        disk.percent \s+
        node \s+
        node
        \n
        (
          \s+\d* \s+
@ -19,7 +19,7 @@
  - match:
      $body: |
        /# epoch timestamp count
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \s+ \n $/
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \n$/

  - do:
      index:
@ -35,7 +35,7 @@
  - match:
      $body: |
        /# epoch timestamp count
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \s+ \n $/
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \n $/

  - do:
      index:
@ -52,7 +52,7 @@
  - match:
      $body: |
        /# count
          ^ 2 \s+ \n $/
          ^ 2 \n $/

  - do:
@ -62,7 +62,7 @@
  - match:
      $body: |
        /# epoch timestamp count
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \s+ \n $/
          ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \n $/

  - do:
      cat.count:
@ -71,5 +71,5 @@

  - match:
      $body: |
        /^ epoch \s+ timestamp \s+ count \s+ \n
           \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \s+ \n $/
        /^ epoch \s+ timestamp \s+ count \n
           \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \n $/

@ -38,8 +38,8 @@

  - match:
      $body: |
        /^ total \s \n
           (\s*\d+(\.\d+)?[gmk]?b \s \n)+ $/
        /^ total \n
           (\s*\d+(\.\d+)?[gmk]?b \n)+ $/

  - do:
      cat.fielddata:
@ -48,8 +48,8 @@

  - match:
      $body: |
        /^ total \s+ foo \s+ \n
           (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \s \n)+ \s*$/
        /^ total \s+ foo \n
           (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/

  - do:
      cat.fielddata:
@ -59,5 +59,5 @@

  - match:
      $body: |
        /^ total \s+ foo \s+ \n
           (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \s \n)+ \s*$/
        /^ total \s+ foo \n
           (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/

@ -46,7 +46,7 @@
        \d+ \s+ # unassign
        \d+ \s+ # pending_tasks
        (-|\d+[.]\d+ms|s) \s+ # max task waiting time
        \d+\.\d+% \s+ # active shards percent
        \d+\.\d+% # active shards percent
        \n
        )+
        $/
@ -7,7 +7,7 @@
  - match:
      $body: |
        / #host ip heap.percent ram.percent cpu load node.role master name
        ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \s+ \n)+ $/
        ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/

  - do:
      cat.nodes:
@ -15,8 +15,8 @@

  - match:
      $body: |
        /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \s+ \n
           (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \s+ \n)+ $/
        /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n
           (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/

  - do:
      cat.nodes:
@ -25,8 +25,8 @@

  - match:
      $body: |
        /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n
           (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/
        /^ heap\.current \s+ heap\.percent \s+ heap\.max \n
           (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \n)+ $/

  - do:
      cat.nodes:
@ -35,8 +35,8 @@

  - match:
      $body: |
        /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n
           (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/
        /^ heap\.current \s+ heap\.percent \s+ heap\.max \n
           (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \n)+ $/

  - do:
      cat.nodes:
@ -46,5 +46,5 @@
  - match:
      # Windows reports -1 for the file descriptor counts.
      $body: |
        /^ file_desc\.current \s+ file_desc\.percent \s+ file_desc\.max \s+ \n
           (\s+ (-1|\d+) \s+ \d+ \s+ (-1|\d+) \s+ \n)+ $/
        /^ file_desc\.current \s+ file_desc\.percent \s+ file_desc\.max \n
           (\s+ (-1|\d+) \s+ \d+ \s+ (-1|\d+) \n)+ $/

@ -42,7 +42,7 @@
        \d+ \s+ # total_bytes
        \d+ \s+ # translog
        -?\d+\.\d+% \s+ # translog_percent
        -?\d+ \s+ # total_translog
        -?\d+ # total_translog
        \n
        )+
        $/
@ -108,7 +108,7 @@
        h: index,state,sync_id
  - match:
      $body: |
        /^(sync_id_test\s+STARTED\s+[A-Za-z0-9_\-]{20}\s+\n){5}$/
        /^(sync_id_test\s+STARTED\s+[A-Za-z0-9_\-]{20}\n){5}$/

  - do:
      indices.delete:
@ -7,7 +7,7 @@
  - match:
      $body: |
        / #host ip bulk.active bulk.queue bulk.rejected index.active index.queue index.rejected search.active search.queue search.rejected
        ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \n)+ $/
        ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/

  - do:
      cat.thread_pool:
@ -15,8 +15,8 @@

  - match:
      $body: |
        /^ host \s+ ip \s+ bulk.active \s+ bulk.queue \s+ bulk.rejected \s+ index.active \s+ index.queue \s+ index.rejected \s+ search.active \s+ search.queue \s+ search.rejected \s+ \n
           (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \n)+ $/
        /^ host \s+ ip \s+ bulk.active \s+ bulk.queue \s+ bulk.rejected \s+ index.active \s+ index.queue \s+ index.rejected \s+ search.active \s+ search.queue \s+ search.rejected \n
           (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/

  - do:
      cat.thread_pool:
@ -25,7 +25,7 @@
  - match:
      $body: |
        / #pid id host ip port
        ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \s+ \n)+ $/
        ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \n)+ $/

  - do:
      cat.thread_pool:
@ -35,8 +35,8 @@

  - match:
      $body: |
        /^ id \s+ ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \s+ pa \s+ \n
           (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \n)+ $/
        /^ id \s+ ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \s+ pa \n
           (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/

  - do:
      cat.thread_pool:
@ -45,8 +45,8 @@

  - match:
      $body: |
        /^ id \s+ bulk.type \s+ bulk.active \s+ bulk.size \s+ bulk.queue \s+ bulk.queueSize \s+ bulk.rejected \s+ bulk.largest \s+ bulk.completed \s+ bulk.min \s+ bulk.max \s+ bulk.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ bulk.type \s+ bulk.active \s+ bulk.size \s+ bulk.queue \s+ bulk.queueSize \s+ bulk.rejected \s+ bulk.largest \s+ bulk.completed \s+ bulk.min \s+ bulk.max \s+ bulk.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -55,8 +55,8 @@

  - match:
      $body: |
        /^ id \s+ flush.type \s+ flush.active \s+ flush.size \s+ flush.queue \s+ flush.queueSize \s+ flush.rejected \s+ flush.largest \s+ flush.completed \s+ flush.min \s+ flush.max \s+ flush.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ flush.type \s+ flush.active \s+ flush.size \s+ flush.queue \s+ flush.queueSize \s+ flush.rejected \s+ flush.largest \s+ flush.completed \s+ flush.min \s+ flush.max \s+ flush.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -65,8 +65,8 @@

  - match:
      $body: |
        /^ id \s+ generic.type \s+ generic.active \s+ generic.size \s+ generic.queue \s+ generic.queueSize \s+ generic.rejected \s+ generic.largest \s+ generic.completed \s+ generic.min \s+ generic.max \s+ generic.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ generic.type \s+ generic.active \s+ generic.size \s+ generic.queue \s+ generic.queueSize \s+ generic.rejected \s+ generic.largest \s+ generic.completed \s+ generic.min \s+ generic.max \s+ generic.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -75,8 +75,8 @@

  - match:
      $body: |
        /^ id \s+ get.type \s+ get.active \s+ get.size \s+ get.queue \s+ get.queueSize \s+ get.rejected \s+ get.largest \s+ get.completed \s+ get.min \s+ get.max \s+ get.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ get.type \s+ get.active \s+ get.size \s+ get.queue \s+ get.queueSize \s+ get.rejected \s+ get.largest \s+ get.completed \s+ get.min \s+ get.max \s+ get.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -85,8 +85,8 @@

  - match:
      $body: |
        /^ id \s+ index.type \s+ index.active \s+ index.size \s+ index.queue \s+ index.queueSize \s+ index.rejected \s+ index.largest \s+ index.completed \s+ index.min \s+ index.max \s+ index.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ index.type \s+ index.active \s+ index.size \s+ index.queue \s+ index.queueSize \s+ index.rejected \s+ index.largest \s+ index.completed \s+ index.min \s+ index.max \s+ index.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -95,8 +95,8 @@

  - match:
      $body: |
        /^ id \s+ management.type \s+ management.active \s+ management.size \s+ management.queue \s+ management.queueSize \s+ management.rejected \s+ management.largest \s+ management.completed \s+ management.min \s+ management.max \s+ management.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ management.type \s+ management.active \s+ management.size \s+ management.queue \s+ management.queueSize \s+ management.rejected \s+ management.largest \s+ management.completed \s+ management.min \s+ management.max \s+ management.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -105,8 +105,8 @@

  - match:
      $body: |
        /^ id \s+ force_merge.type \s+ force_merge.active \s+ force_merge.size \s+ force_merge.queue \s+ force_merge.queueSize \s+ force_merge.rejected \s+ force_merge.largest \s+ force_merge.completed \s+ force_merge.min \s+ force_merge.max \s+ force_merge.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ force_merge.type \s+ force_merge.active \s+ force_merge.size \s+ force_merge.queue \s+ force_merge.queueSize \s+ force_merge.rejected \s+ force_merge.largest \s+ force_merge.completed \s+ force_merge.min \s+ force_merge.max \s+ force_merge.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -115,8 +115,8 @@

  - match:
      $body: |
        /^ id \s+ percolate.type \s+ percolate.active \s+ percolate.size \s+ percolate.queue \s+ percolate.queueSize \s+ percolate.rejected \s+ percolate.largest \s+ percolate.completed \s+ percolate.min \s+ percolate.max \s+ percolate.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ percolate.type \s+ percolate.active \s+ percolate.size \s+ percolate.queue \s+ percolate.queueSize \s+ percolate.rejected \s+ percolate.largest \s+ percolate.completed \s+ percolate.min \s+ percolate.max \s+ percolate.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -125,8 +125,8 @@

  - match:
      $body: |
        /^ id \s+ refresh.type \s+ refresh.active \s+ refresh.size \s+ refresh.queue \s+ refresh.queueSize \s+ refresh.rejected \s+ refresh.largest \s+ refresh.completed \s+ refresh.min \s+ refresh.max \s+ refresh.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ refresh.type \s+ refresh.active \s+ refresh.size \s+ refresh.queue \s+ refresh.queueSize \s+ refresh.rejected \s+ refresh.largest \s+ refresh.completed \s+ refresh.min \s+ refresh.max \s+ refresh.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -135,8 +135,8 @@

  - match:
      $body: |
        /^ id \s+ search.type \s+ search.active \s+ search.size \s+ search.queue \s+ search.queueSize \s+ search.rejected \s+ search.largest \s+ search.completed \s+ search.min \s+ search.max \s+ search.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ search.type \s+ search.active \s+ search.size \s+ search.queue \s+ search.queueSize \s+ search.rejected \s+ search.largest \s+ search.completed \s+ search.min \s+ search.max \s+ search.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -145,8 +145,8 @@

  - match:
      $body: |
        /^ id \s+ snapshot.type \s+ snapshot.active \s+ snapshot.size \s+ snapshot.queue \s+ snapshot.queueSize \s+ snapshot.rejected \s+ snapshot.largest \s+ snapshot.completed \s+ snapshot.min \s+ snapshot.max \s+ snapshot.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ snapshot.type \s+ snapshot.active \s+ snapshot.size \s+ snapshot.queue \s+ snapshot.queueSize \s+ snapshot.rejected \s+ snapshot.largest \s+ snapshot.completed \s+ snapshot.min \s+ snapshot.max \s+ snapshot.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -155,8 +155,8 @@

  - match:
      $body: |
        /^ id \s+ suggest.type \s+ suggest.active \s+ suggest.size \s+ suggest.queue \s+ suggest.queueSize \s+ suggest.rejected \s+ suggest.largest \s+ suggest.completed \s+ suggest.min \s+ suggest.max \s+ suggest.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ suggest.type \s+ suggest.active \s+ suggest.size \s+ suggest.queue \s+ suggest.queueSize \s+ suggest.rejected \s+ suggest.largest \s+ suggest.completed \s+ suggest.min \s+ suggest.max \s+ suggest.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

  - do:
      cat.thread_pool:
@ -165,5 +165,5 @@

  - match:
      $body: |
        /^ id \s+ warmer.type \s+ warmer.active \s+ warmer.size \s+ warmer.queue \s+ warmer.queueSize \s+ warmer.rejected \s+ warmer.largest \s+ warmer.completed \s+ warmer.min \s+ warmer.max \s+ warmer.keepAlive \s+ \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/
        /^ id \s+ warmer.type \s+ warmer.active \s+ warmer.size \s+ warmer.queue \s+ warmer.queueSize \s+ warmer.rejected \s+ warmer.largest \s+ warmer.completed \s+ warmer.min \s+ warmer.max \s+ warmer.keepAlive \n
           (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/

@ -17,14 +17,12 @@
 * under the License.
 */

package org.elasticsearch.mapper.attachments;
package org.elasticsearch.index;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MapperService;
@ -37,22 +35,29 @@ import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;

class MapperTestUtils {

public class MapperTestUtils {

    public static MapperService newMapperService(Path tempDir, Settings indexSettings) throws IOException {
        Settings nodeSettings = Settings.builder()
            .put("path.home", tempDir)
            .build();
        indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(indexSettings)
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("test"), indexSettings);
        AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(idxSettings);
        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
        IndicesModule indicesModule = new IndicesModule();
        indicesModule.registerMapper(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
        return newMapperService(tempDir, indexSettings, indicesModule);
    }

    public static MapperService newMapperService(Path tempDir, Settings settings, IndicesModule indicesModule) throws IOException {
        Settings.Builder settingsBuilder = Settings.builder()
            .put("path.home", tempDir)
            .put(settings);
        if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) {
            settingsBuilder.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
        }
        Settings finalSettings = settingsBuilder.build();
        MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
        return new MapperService(idxSettings, analysisService, similarityService, mapperRegistry);
        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("test"), finalSettings);
        AnalysisService analysisService = new AnalysisRegistry(null, new Environment(finalSettings)).build(indexSettings);
        SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
        return new MapperService(indexSettings,
            analysisService,
            similarityService,
            mapperRegistry);
    }
}
@ -21,6 +21,7 @@ package org.elasticsearch.node;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
@ -39,7 +40,7 @@ public class MockNode extends Node {
    private Collection<Class<? extends Plugin>> plugins;

    public MockNode(Settings settings, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
        super(settings, version, classpathPlugins);
        super(InternalSettingsPreparer.prepareEnvironment(settings, null), version, classpathPlugins);
        this.version = version;
        this.plugins = classpathPlugins;
    }